fix_pylint_for_CI #1119

Merged
merged 4 commits on Jun 9, 2022
Changes from 2 commits
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
@@ -72,7 +72,7 @@ jobs:
run: |
pip install --upgrade pip
pip install pylint
- pylint --disable=C0104,C0114,C0115,C0116,C0301,C0302,C0411,C0413,C1802,R0201,R0401,R0801,R0902,R0903,R0911,R0912,R0913,R0914,R0915,R1720,W0105,W0123,W0201,W0511,W0613,W1113,W1514,E0401,E1121,C0103,C0209,R0402,R1705,R1710,R1725,R1735,W0102,W0212,W0221,W0223,W0231,W0237,W0612,W0621,W0622,W0703,W1309,E1102,E1136 --const-rgx='[a-z_][a-z0-9_]{2,30}$' qlib --init-hook "import astroid; astroid.context.InferenceContext.max_inferred = 500"
+ pylint --disable=C0104,C0114,C0115,C0116,C0301,C0302,C0411,C0413,C1802,C3001,R0401,R0801,R0902,R0903,R0911,R0912,R0913,R0914,R0915,R1720,W0105,W0123,W0201,W0511,W0613,W1113,W1514,E0401,E1121,C0103,C0209,R0402,R1705,R1710,R1725,R1735,W0102,W0212,W0221,W0223,W0231,W0237,W0612,W0621,W0622,W0703,W1309,E1102,E1136 --const-rgx='[a-z_][a-z0-9_]{2,30}$' qlib --init-hook "import astroid; astroid.context.InferenceContext.max_inferred = 500"
Collaborator: fix C3001

# The following flake8 error codes were ignored:
# E501 line too long
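For context on the "fix C3001" note: C3001 is pylint's unnecessary-lambda-assignment check (introduced around pylint 2.14), and the new command adds it to the disable list; the command also stops disabling R0201 (no-self-use), which recent pylint releases appear to have moved out of the default checks. A minimal illustration of what C3001 flags, not taken from this repository:

# Illustration only: the pattern pylint reports as C3001 (unnecessary-lambda-assignment).
double = lambda x: x * 2  # flagged: a lambda bound to a name

# The suggested rewrite uses def, which gives a real name and allows a docstring.
def double_fn(x):
    """Return twice the input."""
    return x * 2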
2 changes: 1 addition & 1 deletion qlib/contrib/meta/data_selection/dataset.py
@@ -290,7 +290,7 @@ def _prepare_meta_ipt(self, task):
ic_df = self.internal_data.data_ic_df

segs = task["dataset"]["kwargs"]["segments"]
- end = max([segs[k][1] for k in ("train", "valid") if k in segs])
+ end = max(segs[k][1] for k in ("train", "valid") if k in segs)
ic_df_avail = ic_df.loc[:end, pd.IndexSlice[:, :end]]

# meta data set focus on the **information** instead of preprocess
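The change above swaps a list comprehension inside max() for a generator expression; the result is identical, but no temporary list is built, which is the style newer pylint releases suggest (the consider-using-generator check, as far as I can tell). A small sketch with invented segment boundaries:

# Sketch with made-up segment dates; only the call style differs between the two forms.
segs = {"train": ("2008-01-01", "2014-12-31"), "valid": ("2015-01-01", "2016-12-31")}

end_old = max([segs[k][1] for k in ("train", "valid") if k in segs])  # builds a throwaway list
end_new = max(segs[k][1] for k in ("train", "valid") if k in segs)    # feeds max() lazily

assert end_old == end_new == "2016-12-31"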
4 changes: 3 additions & 1 deletion qlib/contrib/model/pytorch_hist.py
@@ -292,7 +292,9 @@ def fit(
pretrained_model.load_state_dict(torch.load(self.model_path))

model_dict = self.HIST_model.state_dict()
- pretrained_dict = {k: v for k, v in pretrained_model.state_dict().items() if k in model_dict}
+ pretrained_dict = {
+     k: v for k, v in pretrained_model.state_dict().items() if k in model_dict
+ }  # pylint: disable=E1135
model_dict.update(pretrained_dict)
self.HIST_model.load_state_dict(model_dict)
self.logger.info("Loading pretrained model Done...")
4 changes: 2 additions & 2 deletions qlib/contrib/model/pytorch_tra.py
@@ -167,8 +167,8 @@ def _init_model(self):
for param in self.tra.predictors.parameters():
param.requires_grad_(False)

self.logger.info("# model params: %d" % sum([p.numel() for p in self.model.parameters() if p.requires_grad]))
self.logger.info("# tra params: %d" % sum([p.numel() for p in self.tra.parameters() if p.requires_grad]))
self.logger.info("# model params: %d" % sum(p.numel() for p in self.model.parameters() if p.requires_grad))
self.logger.info("# tra params: %d" % sum(p.numel() for p in self.tra.parameters() if p.requires_grad))

self.optimizer = optim.Adam(list(self.model.parameters()) + list(self.tra.parameters()), lr=self.lr)
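Same pattern as the dataset.py change: sum() now receives a generator expression instead of a temporary list. A tiny sketch of counting trainable parameters this way, with a hypothetical model:

from torch import nn

model = nn.Linear(8, 3)
# The generator expression feeds sum() directly, no intermediate list.
n_trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
print("# model params: %d" % n_trainable)  # 8*3 weights + 3 biases = 27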

5 changes: 1 addition & 4 deletions qlib/data/dataset/__init__.py
@@ -438,7 +438,7 @@ def build_index(data: pd.DataFrame) -> Tuple[pd.DataFrame, dict]:

@property
def empty(self):
- return self.__len__() == 0
+ return self.end_idx - self.start_idx == 0

def _get_indices(self, row: int, col: int) -> np.array:
"""
@@ -539,9 +539,6 @@ def __getitem__(self, idx: Union[int, Tuple[object, str], List[int]]):
data = data.reshape(-1, self.step_len, *data.shape[1:])
return data

- def __len__(self):
-     return self.end_idx - self.start_idx

Collaborator: fix it


class TSDatasetH(DatasetH):
"""
2 changes: 1 addition & 1 deletion qlib/rl/utils/data_queue.py
@@ -145,7 +145,7 @@ def __del__(self):
def __iter__(self):
if not self._activated:
raise ValueError(
"Need to call activate() to launch a daemon worker " "to produce data into data queue before using it."
"Need to call activate() to launch a daemon worker, to produce data into data queue before using it."
Collaborator: fix it
)
return self._consumer()
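The change above merges two adjacent string literals into a single one; the rendered message is unchanged apart from the added comma, and a single literal is harder to misread, since splitting a message across adjacent literals is the implicit string concatenation pattern that some linters flag. A one-line demonstration:

# Adjacent literals concatenate implicitly at compile time, which is easy to misread.
msg_implicit = "Need to call activate() to launch a daemon worker " "to produce data."
msg_single = "Need to call activate() to launch a daemon worker to produce data."
assert msg_implicit == msg_single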
