add hpi config #547

Open · wants to merge 1 commit into base: release_v1.1
paddlets/models/anomaly/dl/anomaly_base.py: 29 additions & 8 deletions
@@ -374,9 +374,10 @@ def apply_to_static(self, model):
meta_data = self._build_meta()
spec = build_network_input_spec(meta_data)
model = paddle.jit.to_static(model, input_spec=spec)
-logger.info("Successfully to apply @to_static with specs: {}".format(spec))
+logger.info("Successfully to apply @to_static with specs: {}".format(
+    spec))
return model

def fit(self,
train_tsdataset: TSDataset,
valid_tsdataset: Optional[TSDataset]=None,
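
A note on the apply_to_static path touched above: paddle.jit.to_static converts the dygraph network into a static graph from a list of InputSpec descriptions, which is what build_network_input_spec builds here. A minimal self-contained sketch of that API; the input name 'ts' and the shape [None, 96, 5] are illustrative assumptions, not values taken from this PR:

```python
# Hedged sketch of paddle.jit.to_static with an explicit input spec.
import paddle
from paddle.static import InputSpec

net = paddle.nn.Sequential(
    paddle.nn.Flatten(),          # [batch, 96, 5] -> [batch, 480]
    paddle.nn.Linear(96 * 5, 1), )

# None in the first dim keeps the batch size dynamic.
spec = [InputSpec(shape=[None, 96, 5], dtype='float32', name='ts')]
static_net = paddle.jit.to_static(net, input_spec=spec)

out = static_net(paddle.randn([4, 96, 5]))  # graph is traced on first call
print(out.shape)  # [4, 1]
```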
@@ -426,13 +427,14 @@ def _fit(self,
self._network = self._init_network()
self._optimizer = self._init_optimizer()
if self.use_amp:
-logger.info('use AMP to train. AMP level = {}'.format(self.amp_level))
+logger.info('use AMP to train. AMP level = {}'.format(
+    self.amp_level))
self.scaler = paddle.amp.GradScaler(init_loss_scaling=1024)
if self.amp_level == 'O2':
self._network, self._optimizer = paddle.amp.decorate(
-        models=self._network,
-        optimizers=self._optimizer,
-        level='O2')
+    models=self._network,
+    optimizers=self._optimizer,
+    level='O2')
if to_static_train:
self._network = self.apply_to_static(self._network)

@@ -508,7 +510,7 @@ def _eval(self, tsdataset: TSDataset, **predict_kwargs) -> TSDataset:
"elementwise_add", "batch_norm", "sync_batch_norm"
},
custom_black_list={'bilinear_interp_v2'}):

y_pred, y_true = self._network(data)
else:
y_pred, y_true = self._network(data)
@@ -700,7 +702,7 @@ def _train_batch(self, X: Dict[str, paddle.Tensor]) -> Dict[str, Any]:
y_pred, y_true = self._network(X)
train_run_cost = time.time() - start_time
loss = self._compute_loss(y_pred, y_true)
-scaled_loss = self.scaler.scale(loss)
+scaled_loss = self.scaler.scale(loss)
scaled_loss.backward()
self.scaler.step(self._optimizer) # update parameters
self.scaler.update()
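
For readers unfamiliar with the scaled-loss lines above: this is the standard paddle.amp loss-scaling loop, where the loss is multiplied by a scale factor before backward so that small fp16 gradients do not underflow. A self-contained sketch on a toy network (assumes a working PaddlePaddle install; the model and data are made up):

```python
# Minimal sketch of the paddle.amp training step used in _train_batch above.
import paddle

net = paddle.nn.Linear(5, 1)
opt = paddle.optimizer.Adam(parameters=net.parameters())
scaler = paddle.amp.GradScaler(init_loss_scaling=1024)

x = paddle.randn([8, 5])
y = paddle.randn([8, 1])

with paddle.amp.auto_cast(level='O1'):  # mixed-precision forward pass
    loss = paddle.nn.functional.mse_loss(net(x), y)

scaled_loss = scaler.scale(loss)  # multiply loss by the current scale factor
scaled_loss.backward()            # gradients are scaled too
scaler.step(opt)                  # unscales gradients, then optimizer step
scaler.update()                   # adapt the scale for the next iteration
opt.clear_grad()
```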
@@ -953,6 +955,25 @@ def save(self,
model_meta.update(data_info)
if model_name is not None:
model_meta['Global'] = {'model_name': model_name}
+shapes = [[1, 64, 1], [1, 96, 5]]
+paddle_shapes = shapes + [[8, 192, 20]]
+tensorrt_shapes = shapes + [[8, 96, 20]]
+
+hpi_config = {
+    'backend_configs': {
+        'paddle_infer': {
+            'trt_dynamic_shapes': {
+                'ts': paddle_shapes
+            }
+        },
+        'tensorrt': {
+            'dynamic_shapes': {
+                'ts': tensorrt_shapes
+            }
+        }
+    }
+}
+model_meta['Hpi'] = hpi_config
model_meta = convert_and_remove_types(model_meta)
yaml.dump(model_meta, f)
except Exception as e:
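Because the same Hpi block is added verbatim to all three save() methods (see the two diffs below), it is worth seeing what it serializes to. A minimal sketch using plain PyYAML; reading each triple as a TensorRT-style [min, opt, max] dynamic shape for the input named 'ts' is an assumption, not something the diff states:

```python
# Reproduce the 'Hpi' section that save() now embeds in the model metadata.
# default_flow_style=None is used here only for compact output; the PR itself
# calls plain yaml.dump(model_meta, f).
import yaml

shapes = [[1, 64, 1], [1, 96, 5]]         # shared lower/typical shapes
paddle_shapes = shapes + [[8, 192, 20]]   # upper bound for Paddle Inference + TRT
tensorrt_shapes = shapes + [[8, 96, 20]]  # tighter upper bound for native TRT

hpi_config = {
    'backend_configs': {
        'paddle_infer': {'trt_dynamic_shapes': {'ts': paddle_shapes}},
        'tensorrt': {'dynamic_shapes': {'ts': tensorrt_shapes}},
    }
}

print(yaml.dump({'Hpi': hpi_config}, default_flow_style=None))
# Hpi:
#   backend_configs:
#     paddle_infer:
#       trt_dynamic_shapes:
#         ts:
#         - [1, 64, 1]
#         - [1, 96, 5]
#         - [8, 192, 20]
#     tensorrt:
#       dynamic_shapes:
#         ts:
#         - [1, 64, 1]
#         - [1, 96, 5]
#         - [8, 96, 20]
```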
paddlets/models/classify/dl/paddle_base.py: 38 additions & 20 deletions
@@ -146,8 +146,7 @@ def _check_params(self):
if not self._eval_metrics:
self._eval_metrics = ["acc"]

-def _check_tsdatasets(self,
-                      tsdatasets: List[TSDataset],
+def _check_tsdatasets(self, tsdatasets: List[TSDataset],
labels: np.ndarray):
"""Ensure the robustness of input data (consistent feature order), at the same time,
check whether the data types are compatible. If not, the processing logic is as follows.
@@ -215,8 +214,7 @@ def _init_optimizer(self) -> Optimizer:

else:
return self._optimizer_fn(
-    **self._optimizer_params,
-    parameters=self._network.parameters())
+    **self._optimizer_params, parameters=self._network.parameters())

def _init_fit_dataloaders(
self,
@@ -261,8 +259,8 @@ def _init_fit_dataloaders(
valid_tsdatasets, valid_labels,
self._fit_params['input_lens'])
else:
-valid_dataset = data_adapter.to_paddle_dataset(
-    valid_tsdatasets, valid_labels)
+valid_dataset = data_adapter.to_paddle_dataset(valid_tsdatasets,
+                                               valid_labels)
valid_dataloader = data_adapter.to_paddle_dataloader(
valid_dataset, self._batch_size, shuffle=False)

@@ -284,8 +282,8 @@ def _init_predict_dataloader(
tsdatasets = [tsdatasets]
self._check_tsdatasets(tsdatasets, labels)
data_adapter = ClassifyDataAdapter()
-dataset = data_adapter.to_paddle_dataset(
-    tsdatasets, labels, self._fit_params['input_lens'])
+dataset = data_adapter.to_paddle_dataset(tsdatasets, labels,
+                                         self._fit_params['input_lens'])
dataloader = data_adapter.to_paddle_dataloader(
dataset, self._batch_size, shuffle=False)
return dataloader
@@ -350,9 +348,10 @@ def apply_to_static(self, model):
meta_data = self._build_meta()
spec = build_network_input_spec(meta_data)
model = paddle.jit.to_static(model, input_spec=spec)
-logger.info("Successfully to apply @to_static with specs: {}".format(spec))
+logger.info("Successfully to apply @to_static with specs: {}".format(
+    spec))
return model

def fit(self,
train_tsdatasets: List[TSDataset],
train_labels: np.ndarray,
@@ -394,17 +393,18 @@ def _fit(self,
self._history, self._callback_container = self._init_callbacks()
self._network = self._init_network()
self._optimizer = self._init_optimizer()
-if self.use_amp :
-    logger.info('use AMP to train. AMP level = {}'.format(self.amp_level))
+if self.use_amp:
+    logger.info('use AMP to train. AMP level = {}'.format(
+        self.amp_level))
self.scaler = paddle.amp.GradScaler(init_loss_scaling=1024)
if self.amp_level == 'O2':
self._network, self._optimizer = paddle.amp.decorate(
-        models=self._network,
-        optimizers=self._optimizer,
-        level='O2')
+    models=self._network,
+    optimizers=self._optimizer,
+    level='O2')
if to_static_train:
self._network = self.apply_to_static(self._network)

check_random_state(self._seed)

# Call the `on_train_begin` method of each callback before the training starts.
@@ -447,8 +447,7 @@ def predict(
# np.save('probs',probs)
rng = check_random_state(self._seed)
return np.array([
-self._classes_[int(
-    rng.choice(np.flatnonzero(prob == prob.max())))]
+self._classes_[int(rng.choice(np.flatnonzero(prob == prob.max())))]
for prob in probs
])
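
The rewritten predict() line packs a useful trick into one expression: np.flatnonzero(prob == prob.max()) collects every class index tied for the highest probability, and the seeded rng.choice breaks ties reproducibly instead of always favoring the lowest index the way a plain argmax would. A standalone illustration (the class labels are made up):

```python
# Reproducible tie-breaking among argmax candidates, as in predict() above.
import numpy as np

classes = np.array(['a', 'b', 'c'])        # illustrative class labels
prob = np.array([0.4, 0.4, 0.2])           # classes 0 and 1 are tied

rng = np.random.RandomState(42)            # same seed -> same tie-break
tied = np.flatnonzero(prob == prob.max())  # -> array([0, 1])
pred = classes[int(rng.choice(tied))]
print(tied, pred)
```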

@@ -548,7 +547,7 @@ def _train_batch(self, X: Dict[str, paddle.Tensor],
output = self._network(X)
train_run_cost = time.time() - start_time
loss = self._compute_loss(output, y)
-scaled_loss = self.scaler.scale(loss)
+scaled_loss = self.scaler.scale(loss)
scaled_loss.backward()
self.scaler.step(self._optimizer) # update parameters
self.scaler.update()
@@ -559,7 +558,7 @@
"loss": loss.item(),
"train_run_cost": train_run_cost
}

else:
output = self._network(X)
train_run_cost = time.time() - start_time
@@ -793,6 +792,25 @@ def save(self,
model_meta.update(data_info)
if model_name is not None:
model_meta['Global'] = {'model_name': model_name}
+shapes = [[1, 64, 1], [1, 96, 5]]
+paddle_shapes = shapes + [[8, 192, 20]]
+tensorrt_shapes = shapes + [[8, 96, 20]]
+
+hpi_config = {
+    'backend_configs': {
+        'paddle_infer': {
+            'trt_dynamic_shapes': {
+                'ts': paddle_shapes
+            }
+        },
+        'tensorrt': {
+            'dynamic_shapes': {
+                'ts': tensorrt_shapes
+            }
+        }
+    }
+}
+model_meta['Hpi'] = hpi_config
model_meta = convert_and_remove_types(model_meta)
yaml.dump(model_meta, f)
except Exception as e:
paddlets/models/forecasting/dl/paddle_base.py: 19 additions & 0 deletions
@@ -201,6 +201,25 @@ def save(self,
model_meta.update(data_info)
if model_name is not None:
model_meta['Global'] = {'model_name': model_name}
+shapes = [[1, 64, 1], [1, 96, 5]]
+paddle_shapes = shapes + [[8, 192, 20]]
+tensorrt_shapes = shapes + [[8, 96, 20]]
+
+hpi_config = {
+    'backend_configs': {
+        'paddle_infer': {
+            'trt_dynamic_shapes': {
+                'ts': paddle_shapes
+            }
+        },
+        'tensorrt': {
+            'dynamic_shapes': {
+                'ts': tensorrt_shapes
+            }
+        }
+    }
+}
+model_meta['Hpi'] = hpi_config
model_meta = convert_and_remove_types(model_meta)
yaml.dump(model_meta, f)
except Exception as e:
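
Finally, a hypothetical consumer-side sketch of how the new metadata could be read back at inference time. The file name model_meta.yaml is an assumption; save() decides the actual path and layout:

```python
# Read the saved metadata and pull the dynamic-shape hints back out.
import yaml

with open('model_meta.yaml') as f:  # hypothetical path
    meta = yaml.safe_load(f)

backend_cfgs = meta['Hpi']['backend_configs']
min_s, opt_s, max_s = backend_cfgs['paddle_infer']['trt_dynamic_shapes']['ts']
print(min_s, opt_s, max_s)  # [1, 64, 1] [1, 96, 5] [8, 192, 20]
```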