diff --git a/tone/core/cloud/aliyun/eci_driver.py b/tone/core/cloud/aliyun/eci_driver.py index 5086f471658a65240bdea2f88d51b72e3344d11c..5e1eb209a7c49af9449ab8de3313be8edf225212 100644 --- a/tone/core/cloud/aliyun/eci_driver.py +++ b/tone/core/cloud/aliyun/eci_driver.py @@ -116,4 +116,3 @@ class EciDriver(BaseDriver): raise AliYunException(msg=e.get_error_msg(), code=e.get_error_code()) finally: AliYunNetwork(self.access_id, self.access_key, self.region, self.zone).release_eip_address(eip_addr) - diff --git a/tone/core/common/callback.py b/tone/core/common/callback.py index ccf6292f08c2a8646fa8cb0d3546f682e13fc3ec..6c1bdaa35134cab62cc37e6c42f20ac733e48633 100644 --- a/tone/core/common/callback.py +++ b/tone/core/common/callback.py @@ -45,7 +45,7 @@ class JobCallBack(object): log_info = f'api:{self.api} | method:{self.method} | ' \ f'callback_type:{self.callback_type} | ' \ f'data:{self.data} | result:{success},{res}' - logger.info(f'--- callback request info ---') + logger.info('--- callback request info ---') logger.info(log_info) return success, res diff --git a/tone/core/common/job_result_helper.py b/tone/core/common/job_result_helper.py index c5e4a32386449ccfc117aa82c8f108ff87c57262..c4796dbf46d5a5ba3bb181d7faaf6c4f65432037 100644 --- a/tone/core/common/job_result_helper.py +++ b/tone/core/common/job_result_helper.py @@ -41,8 +41,8 @@ def calc_job(job_id): na = total - count_dict.get('increase_count', 0) - count_dict.get('decline_count', 0) - count_dict.get( 'normal_count', 0) - count_dict.get('invalid_count', 0) result = {'count': total, 'increase': count_dict.get('increase_count', 0), 'decline': - count_dict.get('decline_count', 0), 'normal': count_dict.get('normal_count', 0), 'invalid': - count_dict.get('invalid_count', 0), 'na': na} + count_dict.get('decline_count', 0), 'normal': count_dict.get('normal_count', 0), 'invalid': + count_dict.get('invalid_count', 0), 'na': na} elif test_type == BUSINESS: job_case_queryset = TestJobCase.objects.filter(job_id=job_id) count = len(job_case_queryset) @@ -87,7 +87,7 @@ def count_prefresult_state_num(job_id, test_suite_id=None, test_case_id=None): cursor.execute(search_sql) count = cursor.fetchall() for i in count: - count_dict[str(i[0])+'_count'] = i[1] + count_dict[str(i[0]) + '_count'] = i[1] total = sum(count_dict.values()) return count_dict, total @@ -123,8 +123,8 @@ def count_funcresult_state_num(job_id, test_suite_id=None, test_case_id=None): count = cursor.fetchall() for i in count: count_dict[str(i[0])] = i[1] - total = count_dict.get("1", 0) + count_dict.get("2", 0) + count_dict.get("3", 0) + count_dict.get("4", 0) + \ - count_dict.get("5", 0) + count_dict.get("6", 0) + total = count_dict.get("1", 0) + count_dict.get("2", 0) + count_dict.get("3", 0) + count_dict.\ + get("4", 0) + count_dict.get("5", 0) + count_dict.get("6", 0) return count_dict, total @@ -215,7 +215,7 @@ def parse_func_result_v1(job_id, sub_case_result, match_baseline): SELECT COUNT(1) FROM func_result WHERE test_job_id=%s AND sub_case_result=%s AND match_baseline=%s AND {} """.format(id_sql, id_sql, id_sql) result = execute_sql(search_sql, [job_id, job_id, job_id, job_id, sub_case_result, job_id, sub_case_result, - match_baseline]) + match_baseline]) if result and len(result) == 5: job_case_count, result_case_count, count_total, count_fail, count_no_match_baseline = \ result[0][0], result[1][0], result[2][0], result[3][0], result[4][0] @@ -742,7 +742,7 @@ def _get_cases_by_job_job_suite(test_job_id, job_cases, job_suite, detail_server 'test_case': test_case_name, 
'setup_info': case.setup_info, 'cleanup_info': case.cleanup_info, - 'server_ip': ip, + 'server_ip': ip if ip else '随机', 'server_id': server_info.id if server_info else None, 'server_description': get_job_case_run_server(case.id, return_field='description'), 'is_instance': is_instance, @@ -1214,8 +1214,8 @@ def get_suite_conf_metric_v1(suite_id, suite_name, base_index, group_list, suite thread_tasks = [] for case_info in case_list: if base_is_baseline: - case_result_list = [result for result in baseline_result_list if result['test_case_id'] == - case_info['conf_id']] + case_result_list = [result for result in baseline_result_list + if result['test_case_id'] == case_info['conf_id']] else: case_result_list = [result for result in job_result_list if result['test_case_id'] == case_info['conf_id']] for base_job_id in base_job_list.get('job_list'): @@ -1652,8 +1652,8 @@ def get_job_state(test_job_id, test_type, state, func_view_config, state_second, if runner_version == 2: state = state_second else: - job_case_count, result_case_count, count_total, count_fail, \ - count_no_match_baseline = parse_func_result_v1(test_job_id, 2, 0) + job_case_count, result_case_count, count_total, count_fail, count_no_match_baseline = \ + parse_func_result_v1(test_job_id, 2, 0) if count_total == 0 or job_case_count != result_case_count: state = 'fail' return state diff --git a/tone/core/common/toneagent.py b/tone/core/common/toneagent.py index f20055de1d9e3285e4032f247ee2b0a996be14a1..8156a09e8cbf7035a343411539a42b6113ab8536 100644 --- a/tone/core/common/toneagent.py +++ b/tone/core/common/toneagent.py @@ -48,7 +48,7 @@ class ToneAgentRequest(object): def request(self, api, data, method='post'): self._data.update(data) self._sign() - url = '{domain}/{api}'.format(domain=self._domain, json=data, api=api) + url = '{domain}/{api}'.format(domain=self._domain, api=api) data = self._data try: if method == 'get': @@ -127,7 +127,6 @@ class QueryTaskRequest(ToneAgentRequest): return self.request(self._api, self._request_data) - class RemoveAgentRequest(ToneAgentRequest): def __init__(self, access_key, secret_key): self._api = 'api/agent/remove' diff --git a/tone/core/handle/__init__.py b/tone/core/handle/__init__.py index 32c3d46e76f724944e0039a527b9a6bbf47bda0f..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644 --- a/tone/core/handle/__init__.py +++ b/tone/core/handle/__init__.py @@ -1,6 +0,0 @@ -# _*_ coding:utf-8 _*_ -""" -Module Description: -Date: -Author: Yfh -""" \ No newline at end of file diff --git a/tone/core/handle/job_handle.py b/tone/core/handle/job_handle.py index 62da51d56aa387d232bc9e398e0035642c4859ea..1bf9945352f45d7c28bcbd5c5be813e5fe2cc73c 100644 --- a/tone/core/handle/job_handle.py +++ b/tone/core/handle/job_handle.py @@ -353,8 +353,8 @@ class JobDataHandle(BaseHandle): 'monitor_info': case.monitor_info, 'priority': case.priority, }) - for case in template_cases if TestCase.objects.filter(id=case.test_case_id).exists() and - TestSuite.objects.filter(id=case.test_suite_id).exists()] + for case in template_cases if TestCase.objects.filter(id=case.test_case_id).exists() and TestSuite. 
+ objects.filter(id=case.test_suite_id).exists()] elif self.data_from == 'import': test_config = self.data.get('test_config') assert test_config, JobTestException(ErrorCode.TEST_CONF_NEED) diff --git a/tone/core/handle/report_handle.py b/tone/core/handle/report_handle.py index 6b116b569a5dbeeb74234fe44a14ebc0b45b32d9..dfa8b49aafadbbedf0587b37d9dada0e9319bc64 100644 --- a/tone/core/handle/report_handle.py +++ b/tone/core/handle/report_handle.py @@ -1010,64 +1010,6 @@ def get_item_data_list(tmpl_id, test_type): return pack_item_data(item_data_map) -def pack_group_name(item_name_list, pack_data): - if len(item_name_list) > 1: - if 'group_data' in pack_data: - pack_data['group_data'] = {'group_name': item_name_list[0], - 'group_data': None} - else: - pack_data = {'group_name': item_name_list[0], - 'group_data': {}} - return self.pack_group_name(item_name_list[1:], pack_data) - else: - if 'group_data' in pack_data: - pack_data['group_data'] = {'group_name': item_name_list[0], - 'group_data': {'item_name': item_name_list[0], - 'test_suite_list': []}} - else: - pack_data = {'name': item_name_list[0], - 'test_suite_list': []} - return pack_data - - -def trans_data(group_name_map): - result = list() - parent_dic = {} - for group_name in group_name_map.keys(): - # 多级分组, 目前支持到三级 - if ':' in group_name: - parent_name = group_name.split(':')[0] - son_name = ':'.join(group_name.split(':')[1:]) - if parent_name in parent_dic: - parent_dic[parent_name].append({ - son_name: group_name_map[group_name] - }) - else: - parent_dic[parent_name] = [{ - son_name: group_name_map[group_name] - }] - else: - result.append({ - 'name': group_name, - 'is_group': True, - 'list': group_name_map[group_name] - }) - for tmp in parent_dic: - tmp_dict = {'name': tmp, - 'is_group': True, - 'list': []} - son_list = parent_dic[tmp] - for son_tmp in son_list: - son_name = list(son_tmp.keys())[0] - tmp_dict['list'].append({ - 'name': son_name, - 'is_group': True, - 'list': son_tmp[son_name] - }) - result.append(tmp_dict) - return result - - def init_list(son_list, new_list): for son in son_list: if son.son_list: diff --git a/tone/core/schedule/schedule_job.py b/tone/core/schedule/schedule_job.py index f0094792fede804739f954a3ce541dbe0da5d3ed..e26b1c26721de35b8016efbee9030422bcd62471 100644 --- a/tone/core/schedule/schedule_job.py +++ b/tone/core/schedule/schedule_job.py @@ -173,7 +173,7 @@ def auto_plan_report(): except Exception as ex: logger.error(f'auto_plan_report error. 
ex is {ex}') PlanInstance.objects.filter(id=plan_instance.id).update(report_is_saved=1) - logger.info(f'auto_plan_report end now ...........') + logger.info('auto_plan_report end now ...........') def batch_create_job(): @@ -210,10 +210,10 @@ def clear_timeout_job(): job_id_list = [job_id['id'] for job_id in job_res_list] if job_id_list: with transaction.atomic(): - TestJob.objects.filter(id__in=job_id_list).delete(really_delete=True) - PerfResult.objects.filter(test_job_id__in=job_id_list).delete(really_delete=True) - FuncResult.objects.filter(test_job_id__in=job_id_list).delete(really_delete=True) - JobTagRelation.objects.filter(job_id__in=job_id_list).delete(really_delete=True) - TestStep.objects.filter(job_id__in=job_id_list).delete(really_delete=True) - TestJobCase.objects.filter(job_id__in=job_id_list).delete(really_delete=True) - TestJobSuite.objects.filter(job_id__in=job_id_list).delete(really_delete=True) + TestJob.objects.filter(id__in=job_id_list, query_scope='all').delete(really_delete=True) + PerfResult.objects.filter(test_job_id__in=job_id_list, query_scope='all').delete(really_delete=True) + FuncResult.objects.filter(test_job_id__in=job_id_list, query_scope='all').delete(really_delete=True) + JobTagRelation.objects.filter(job_id__in=job_id_list, query_scope='all').delete(really_delete=True) + TestStep.objects.filter(job_id__in=job_id_list, query_scope='all').delete(really_delete=True) + TestJobCase.objects.filter(job_id__in=job_id_list, query_scope='all').delete(really_delete=True) + TestJobSuite.objects.filter(job_id__in=job_id_list, query_scope='all').delete(really_delete=True) diff --git a/tone/core/utils/common_utils.py b/tone/core/utils/common_utils.py index 845ad560d308b2d044bd446ca4606e2853902d90..a142202c3164cf38d388bfc520d565222d54a04a 100644 --- a/tone/core/utils/common_utils.py +++ b/tone/core/utils/common_utils.py @@ -115,7 +115,7 @@ def execute_sql(sql, params=None): cursor.execute(sql) rows = cursor.fetchall() return rows - except Exception as e: + except Exception: raise ValueError(ErrorCode.ILLEGALITY_PARAM_ERROR) @@ -124,17 +124,19 @@ def kernel_info_format(kernel_info): # 原数据结构:{"kernel": "a.rpm", "devel": "b.rpm", "headers": "c.rpm", "hotfix_install": true} # 新数据结构:{"kernel_packages": ["a.rpm", "b.rpm", "c.rpm"], "hotfix_install": true} if not kernel_info or kernel_info.get('kernel_packages'): + if kernel_info.get('kernel_packages'): + kernel_info['kernel_packages'] = [kp.strip() for kp in kernel_info.get('kernel_packages')] return kernel_info new_kernel_info = {'kernel_packages': []} if kernel_info.get('kernel'): - new_kernel_info['kernel_packages'].append(kernel_info.get('kernel')) + new_kernel_info['kernel_packages'].append(kernel_info.get('kernel').strip()) if kernel_info.get('devel'): - new_kernel_info['kernel_packages'].append(kernel_info.get('devel')) + new_kernel_info['kernel_packages'].append(kernel_info.get('devel').strip()) if kernel_info.get('headers'): - new_kernel_info['kernel_packages'].append(kernel_info.get('headers')) - for name,value in kernel_info.items(): + new_kernel_info['kernel_packages'].append(kernel_info.get('headers').strip()) + for name, value in kernel_info.items(): if name not in ['kernel', 'devel', 'headers']: - new_kernel_info[name] = value + new_kernel_info[name] = value.strip() return new_kernel_info diff --git a/tone/core/utils/config_parser.py b/tone/core/utils/config_parser.py index 1afe71c764a4aed48912fe799ab088efcc735bec..96b186175c34d121b3e3379681f7040477f85c80 100644 --- a/tone/core/utils/config_parser.py +++ 
b/tone/core/utils/config_parser.py @@ -72,10 +72,10 @@ def get_config_from_db(key, default=''): def get_chinese_stopwords(): try: - doc_file1 = f'static/config/chinese_stopwords.txt' + doc_file1 = 'static/config/chinese_stopwords.txt' with open(doc_file1, 'r', encoding='utf-8') as f: stopword_set = set([line.strip() for line in f]) - except Exception as err: + except Exception: stopword_set = set() return stopword_set diff --git a/tone/core/utils/helper.py b/tone/core/utils/helper.py index 22bebd21b019ba6db4b591b9c957dda7182eb7eb..e269cb1524cd8a17baecf73f935ce9881f4d5dc7 100644 --- a/tone/core/utils/helper.py +++ b/tone/core/utils/helper.py @@ -192,7 +192,7 @@ class BaseObject(object): result_dict[k] = [] elif type(v) in (float, int, list, dict, None, tuple, str, str, bool, int) or v is None: result_dict[k] = v - elif type(v) == datetime: + elif type(v) is datetime: result_dict[k] = DateUtil.datetime_to_str(v) elif isinstance(v, ObjectDict): result_dict[k] = v diff --git a/tone/core/utils/permission_manage.py b/tone/core/utils/permission_manage.py index f135551383a882ba1819dd00465a0d7f92b6d9fd..68083e86ba0fb320cca1ad8214e90562185ef74e 100644 --- a/tone/core/utils/permission_manage.py +++ b/tone/core/utils/permission_manage.py @@ -24,7 +24,7 @@ def check_admin_operator_permission(user_name): sys_role_id = RoleMember.objects.get(user_id=user.id).role_id sys_role = Role.objects.get(id=sys_role_id).title if sys_role not in ['super_admin', 'sys_admin', 'sys_test_admin']: - return False + return False return True diff --git a/tone/migrations/0028_auto_20240722_1606.py b/tone/migrations/0028_auto_20240722_1606.py new file mode 100644 index 0000000000000000000000000000000000000000..7e568f1277fb3bd0bcab4d5863646f0e786feb5f --- /dev/null +++ b/tone/migrations/0028_auto_20240722_1606.py @@ -0,0 +1,105 @@ +# Generated by Django 3.2.5 on 2024-07-22 16:03 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('tone', '0027_auto_20231114_1456'), + ] + + operations = [ + migrations.CreateModel( + name='ChatsAnswer', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('gmt_created', models.DateTimeField(auto_now_add=True, help_text='创建时间', verbose_name='create_at')), + ('gmt_modified', models.DateTimeField(auto_now=True, help_text='修改时间', verbose_name='modify_at')), + ('is_deleted', models.BooleanField(db_index=True, default=False, help_text='是否被删除')), + ('reason', models.CharField(help_text='原因', max_length=1024)), + ('answer', models.TextField(help_text='答案')), + ('right_number', models.IntegerField(default=0, help_text='满意次数')), + ('wrong_number', models.IntegerField(default=0, help_text='不满意次数')), + ('problem_type', models.CharField(db_index=True, default='其它', help_text='问题类型', max_length=64)), + ('problem_attribution', models.CharField(db_index=True, default='Other', help_text='问题归属', max_length=64)), + ('enable', models.BooleanField(db_index=True, default=True, help_text='启用状态')), + ], + options={ + 'db_table': 'chats_answer', + }, + ), + migrations.CreateModel( + name='ChatsCollection', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('gmt_created', models.DateTimeField(auto_now_add=True, help_text='创建时间', verbose_name='create_at')), + ('gmt_modified', models.DateTimeField(auto_now=True, help_text='修改时间', verbose_name='modify_at')), + ('is_deleted', models.BooleanField(db_index=True, default=False, 
help_text='是否被删除')), + ('contents', models.CharField(help_text='问题描述', max_length=1024)), + ('status', models.CharField(choices=[('Init', 'Init'), ('Ignore', 'Ignore'), ('Accept', 'Accept')], db_index=True, default='Init', help_text='状态', max_length=64)), + ('creator', models.IntegerField(help_text='创建者', null=True)), + ('contents_sources', models.CharField(choices=[('Collect', '问题收集'), ('Suggest', '意见反馈')], db_index=True, default='Collect', help_text='问题来源', max_length=64)), + ('is_answered', models.BooleanField(db_index=True, default=False, help_text='是否有答案')), + ], + options={ + 'db_table': 'chats_collection', + }, + ), + migrations.CreateModel( + name='ChatsKeyword', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('gmt_created', models.DateTimeField(auto_now_add=True, help_text='创建时间', verbose_name='create_at')), + ('gmt_modified', models.DateTimeField(auto_now=True, help_text='修改时间', verbose_name='modify_at')), + ('is_deleted', models.BooleanField(db_index=True, default=False, help_text='是否被删除')), + ('keyword', models.CharField(db_index=True, help_text='问题关键字', max_length=512)), + ], + options={ + 'db_table': 'chats_keyword', + }, + ), + migrations.CreateModel( + name='ChatsProblem', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('gmt_created', models.DateTimeField(auto_now_add=True, help_text='创建时间', verbose_name='create_at')), + ('gmt_modified', models.DateTimeField(auto_now=True, help_text='修改时间', verbose_name='modify_at')), + ('is_deleted', models.BooleanField(db_index=True, default=False, help_text='是否被删除')), + ('problem', models.CharField(help_text='问题描述', max_length=1024)), + ('creator', models.IntegerField(help_text='创建者', null=True)), + ('level', models.IntegerField(db_index=True, default=1, help_text='推荐优先级')), + ], + options={ + 'db_table': 'chats_problem', + }, + ), + migrations.CreateModel( + name='ChatsProblemAnswerRelation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('gmt_created', models.DateTimeField(auto_now_add=True, help_text='创建时间', verbose_name='create_at')), + ('gmt_modified', models.DateTimeField(auto_now=True, help_text='修改时间', verbose_name='modify_at')), + ('is_deleted', models.BooleanField(db_index=True, default=False, help_text='是否被删除')), + ('problem_id', models.IntegerField(db_index=True, help_text='关联问题ID')), + ('answer_id', models.IntegerField(db_index=True, help_text='关联答案ID')), + ], + options={ + 'db_table': 'chats_problem_answer_relation', + }, + ), + migrations.CreateModel( + name='ChatsProblemKeywordRelation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('gmt_created', models.DateTimeField(auto_now_add=True, help_text='创建时间', verbose_name='create_at')), + ('gmt_modified', models.DateTimeField(auto_now=True, help_text='修改时间', verbose_name='modify_at')), + ('is_deleted', models.BooleanField(db_index=True, default=False, help_text='是否被删除')), + ('problem_id', models.IntegerField(db_index=True, help_text='关联问题ID')), + ('keyword_id', models.IntegerField(db_index=True, help_text='关联问题关键字ID')), + ], + options={ + 'db_table': 'chats_problem_keyword_relation', + }, + ), + ] diff --git a/tone/models/sys/chats_models.py b/tone/models/sys/chats_models.py index 4bbd97f0779084a28beeb5bc6b013e12c13c637a..fae5940d954279d808024d48aa3fea76fe136c1d 100644 --- a/tone/models/sys/chats_models.py +++ 
b/tone/models/sys/chats_models.py @@ -53,10 +53,10 @@ class ChatsProblemKeywordRelation(BaseModel): class ChatsCollection(BaseModel): contents = models.CharField(max_length=1024, help_text='问题描述') status = models.CharField(max_length=64, choices=ContentEnums.PROBLEM_STATUS_CHOICES, default='Init', - help_text='状态', db_index=True) + help_text='状态', db_index=True) creator = models.IntegerField(help_text='创建者', null=True) contents_sources = models.CharField(max_length=64, choices=ContentEnums.PROBLEM_SOURCES_CHOICES, - default='Collect', help_text='问题来源', db_index=True) + default='Collect', help_text='问题来源', db_index=True) is_answered = models.BooleanField(default=False, help_text='是否有答案', db_index=True) class Meta: diff --git a/tone/models/sys/server_models.py b/tone/models/sys/server_models.py index fe08c463e1e6d3da24e2c4129ddfd726fb45ffca..bf97d9b1c5f0730cd044af4220f70a8e8898ad86 100644 --- a/tone/models/sys/server_models.py +++ b/tone/models/sys/server_models.py @@ -91,8 +91,7 @@ class CloudServer(BaseModel): extra_param = json.JSONField(default=dict(), help_text='扩展信息') sn = models.CharField(max_length=64, null=True, help_text='SN') tsn = models.CharField(max_length=64, null=True, help_text='TSN') - release_rule = models.IntegerField(default=1, help_text='用完释放', - choices=TestServerEnums.RELEASE_RULE_CHOICES) + release_rule = models.IntegerField(default=1, help_text='用完释放', choices=TestServerEnums.RELEASE_RULE_CHOICES) # 模板 template_name = models.CharField(max_length=64, help_text='模板名称') diff --git a/tone/serializers/job/report_serializers.py b/tone/serializers/job/report_serializers.py index 02fe06ca019926d2326181f0c091e6e7862dc78e..9656bcaab21f038afeaded32b68e143f1c0434de 100644 --- a/tone/serializers/job/report_serializers.py +++ b/tone/serializers/job/report_serializers.py @@ -5,7 +5,9 @@ Date: Author: """ -from tone.core.handle.report_handle import * +from datetime import datetime +from tone.core.handle.report_handle import get_template_conf, get_template_func_item, get_template_perf_item, \ + get_perf_data, get_func_data, get_report_template, get_old_report from rest_framework import serializers from tone.core.common.serializers import CommonSerializer from tone.models import User, Project, CompareForm, ReportDetail, BaseConfig, ReportItem diff --git a/tone/serializers/job/test_serializers.py b/tone/serializers/job/test_serializers.py index 5a0eb850220284ed06d00b238b2f1872d2eea64a..cfcec5211d7f2f427e2acca442e4714caf0737dd 100644 --- a/tone/serializers/job/test_serializers.py +++ b/tone/serializers/job/test_serializers.py @@ -331,8 +331,8 @@ class JobTestProcessSuiteSerializer(CommonSerializer): 'result': step.result, 'tid': step.tid, 'gmt_created': datetime.datetime.strftime(step.gmt_created, "%Y-%m-%d %H:%M:%S"), - 'gmt_modified': datetime.datetime.strftime(step.gmt_modified, - "%Y-%m-%d %H:%M:%S") if step.state != 'running' else None + 'gmt_modified': datetime.datetime.strftime(step.gmt_modified, "%Y-%m-%d %H:%M:%S") + if step.state != 'running' else None }) return step_li @@ -519,8 +519,8 @@ class JobTestResultSerializer(CommonSerializer): 'suite_name': test_suite.name, 'test_type': test_type, 'note': suite.note, - 'start_time': datetime.datetime.strftime(suite.start_time, - "%Y-%m-%d %H:%M:%S") if suite.start_time else None, + 'start_time': datetime.datetime.strftime(suite.start_time, "%Y-%m-%d %H:%M:%S") + if suite.start_time else None, 'end_time': datetime.datetime.strftime(suite.end_time, "%Y-%m-%d %H:%M:%S") if suite.end_time else None, 'creator': test_job.creator, 
'business_name': business_name, @@ -561,8 +561,8 @@ class JobTestResultSerializer(CommonSerializer): 'suite_name': test_suite.name, 'test_type': test_type, 'note': suite.note, - 'start_time': datetime.datetime.strftime(suite.start_time, - "%Y-%m-%d %H:%M:%S") if suite.start_time else None, + 'start_time': datetime.datetime.strftime(suite.start_time, "%Y-%m-%d %H:%M:%S") + if suite.start_time else None, 'end_time': datetime.datetime.strftime(suite.end_time, "%Y-%m-%d %H:%M:%S") if suite.end_time else None, 'creator': test_job.creator, 'business_name': business_name @@ -721,12 +721,18 @@ class JobTestCaseResultSerializer(CommonSerializer): sub_case_name=obj.sub_case_name, impact_result=True).first() if func_detail is not None: + baseline_ids = FuncBaselineDetail.objects.filter(source_job_id=obj.test_job_id, + test_suite_id=obj.test_suite_id, + test_case_id=obj.test_case_id, + sub_case_name=obj.sub_case_name). \ + values_list('baseline_id', flat=True) return {'server_provider': baseline_obj.server_provider, 'test_type': baseline_obj.test_type, 'test_suite_id': obj.test_suite_id, 'test_case_id': obj.test_case_id, - 'baseline_id': baseline_id, - 'sub_case_name': obj.sub_case_name + 'baseline_id': baseline_ids, + 'sub_case_name': obj.sub_case_name, + 'impact_result': func_detail.impact_result } else: func_detail = FuncBaselineDetail.objects.filter(source_job_id=obj.test_job_id, @@ -735,14 +741,20 @@ class JobTestCaseResultSerializer(CommonSerializer): sub_case_name=obj.sub_case_name, impact_result=True).first() if func_detail is not None: + baseline_ids = FuncBaselineDetail.objects.filter(source_job_id=obj.test_job_id, + test_suite_id=obj.test_suite_id, + test_case_id=obj.test_case_id, + sub_case_name=obj.sub_case_name). \ + values_list('baseline_id', flat=True) baseline_obj = Baseline.objects.filter(id=func_detail.baseline_id).first() if baseline_obj is not None: return {'server_provider': baseline_obj.server_provider, 'test_type': baseline_obj.test_type, 'test_suite_id': obj.test_suite_id, 'test_case_id': obj.test_case_id, - 'baseline_id': func_detail.baseline_id, - 'sub_case_name': obj.sub_case_name + 'baseline_id': baseline_ids, + 'sub_case_name': obj.sub_case_name, + 'impact_result': func_detail.impact_result } @staticmethod diff --git a/tone/services/job/offline_data_services.py b/tone/services/job/offline_data_services.py index ce7b31831321518cb177a823e9404193237c7f18..94ecf27e854d88793be8e0d7ee185e6628311a8a 100644 --- a/tone/services/job/offline_data_services.py +++ b/tone/services/job/offline_data_services.py @@ -20,7 +20,7 @@ from tone.core.utils.sftp_client import sftp_client from tone.models.job.upload_models import OfflineUpload from tone.models.job.job_models import TestJob, TestJobCase, TestJobSuite, TestStep, Project from tone.models.sys.testcase_model import TestSuite, TestCase, TestMetric -from tone.models.sys.server_models import TestServer, TestServerSnapshot, CloudServer, CloudServerSnapshot,\ +from tone.models.sys.server_models import TestServer, TestServerSnapshot, CloudServer, CloudServerSnapshot, \ TestCluster, TestClusterServer from tone.models.job.result_models import FuncResult, PerfResult, ResultFile from tone.models.sys.baseline_models import PerfBaselineDetail @@ -28,6 +28,7 @@ from tone.core.common.constant import OFFLINE_DATA_DIR, RESULTS_DATA_DIR from tone.settings import MEDIA_ROOT from tone.services.job.test_services import JobTestService from tone.core.common.job_result_helper import get_test_config, patch_job_state +from django.conf import settings class 
OfflineDataUploadService(object): @@ -56,8 +57,8 @@ class OfflineDataUploadService(object): if projects: q &= Q(project_id__in=list(projects)) if data.get('start_time') and data.get('end_time'): - end_time = (datetime.datetime.strptime(data.get('end_time'), "%Y-%m-%d") + - datetime.timedelta(days=1)).strftime("%Y-%m-%d") + end_time = (datetime.datetime.strptime(data.get('end_time'), "%Y-%m-%d") + datetime.timedelta(days=1)). \ + strftime("%Y-%m-%d") q &= Q(gmt_created__range=(data.get('start_time'), end_time)) return queryset.filter(q) diff --git a/tone/services/job/result_analysis_services.py b/tone/services/job/result_analysis_services.py index 2f01c7dc96ef4b068483ebabb36a2bc9378c2a89..fe8419fbb21a3c972850b26898162f9afe2d94f5 100644 --- a/tone/services/job/result_analysis_services.py +++ b/tone/services/job/result_analysis_services.py @@ -303,22 +303,22 @@ class PerfAnalysisService(CommonService): def package_job_info(job_value, job_id_list, job_list, provider_env): if job_value and job_value.get('job_id') not in job_id_list: job_list.append({ - 'job_id': job_value.get('job_id'), - 'job_name': job_value.get('job_name'), - 'start_time': job_value.get('start_time'), - 'end_time': job_value.get('end_time'), - 'commit_id': job_value.get('commit_id'), - 'creator': job_value.get('creator'), - 'server': job_value.get('server'), - 'server_id': job_value.get('server_id'), - 'exists': job_value.get('exists'), - 'value': job_value.get('value'), - 'cv_value': job_value.get('cv_value'), - 'note': job_value.get('note'), - 'result_obj_id': job_value.get('result_obj_id'), - 'creator_id': job_value.get('creator_id'), - 'server_provider': provider_env - }) + 'job_id': job_value.get('job_id'), + 'job_name': job_value.get('job_name'), + 'start_time': job_value.get('start_time'), + 'end_time': job_value.get('end_time'), + 'commit_id': job_value.get('commit_id'), + 'creator': job_value.get('creator'), + 'server': job_value.get('server'), + 'server_id': job_value.get('server_id'), + 'exists': job_value.get('exists'), + 'value': job_value.get('value'), + 'cv_value': job_value.get('cv_value'), + 'note': job_value.get('note'), + 'result_obj_id': job_value.get('result_obj_id'), + 'creator_id': job_value.get('creator_id'), + 'server_provider': provider_env + }) job_id_list.append(job_value.get('job_id')) diff --git a/tone/services/job/tag_services.py b/tone/services/job/tag_services.py index de6d9062bc01786789f61bc154fa1c5d057b59af..fa1403c71b726ad1d78cebb7f0dacf896ce6be39 100644 --- a/tone/services/job/tag_services.py +++ b/tone/services/job/tag_services.py @@ -25,6 +25,7 @@ class JobTagService(CommonService): q &= Q(update_user=data.get('update_user')) if data.get('update_user') else q q &= Q(tag_color=data.get('tag_color')) if data.get('tag_color') else q q &= Q(description__icontains=data.get('description')) if data.get('description') else q + q &= Q(source_tag=data.get('source_tag')) if data.get('source_tag') else q return sorted(queryset.filter(q), key=lambda x: (0 if not x.creator else 1, -x.id)) def update(self, data, operator): diff --git a/tone/services/job/test_services.py b/tone/services/job/test_services.py index 3f6cacef0fe2e65e132c70e61d462c244880815f..4ed61e0a77b7aaeb4e13914b3b08480f15bbfe9f 100644 --- a/tone/services/job/test_services.py +++ b/tone/services/job/test_services.py @@ -21,7 +21,7 @@ from threading import Thread from tone import settings from tone.core.common.callback import CallBackType, JobCallBack -from tone.core.utils.common_utils import query_all_dict +from 
tone.core.utils.common_utils import query_all_dict, execute_sql from tone.core.common.enums.job_enums import JobCaseState, JobState from tone.core.common.enums.ts_enums import TestServerState from tone.core.common.info_map import get_result_map @@ -117,15 +117,19 @@ class JobTestService(CommonService): collection_jobs = JobCollection.objects.filter(user_id=operator) collect_job_set = set(collection_jobs.values_list('job_id', flat=True)) query_sql = [] + sql_params = [] if data.get('tab') == 'my' or data.get('my_job'): - query_sql.append('AND creator="{}"'.format(operator)) + query_sql.append('AND creator=%s') + sql_params.append(operator) if data.get('tab') == 'collection' or data.get('collection'): if collect_job_set: - query_sql.append('AND id IN ({})'.format(','.join(str(job_id) for job_id in collect_job_set))) + query_sql.append('AND id IN %s') + sql_params.append(tuple([job_id for job_id in collect_job_set])) else: return res, 0 if data.get('name'): - query_sql.append('AND name LIKE "%{}%"'.format(data.get('name').replace('_', '\_').replace('%', '\%'))) + query_sql.append('AND name LIKE %s') + sql_params.append('%' + data.get('name').replace('_', '\_').replace('%', '\%') + '%') if data.get('job_id'): job_id = data.get('job_id') if isinstance(job_id, int): @@ -133,7 +137,8 @@ class JobTestService(CommonService): else: if not job_id.isdigit(): job_id = 0 - query_sql.append('AND id="{}"'.format(job_id)) + query_sql.append('AND id=%s') + sql_params.append(str(job_id)) if data.get('state'): input_state_list = data.get('state').split(',') if 'pass' in input_state_list: @@ -142,9 +147,11 @@ class JobTestService(CommonService): if 'pending' in state_list: state_list.append('pending_q') if len(state_list) == 1: - query_sql.append('AND state="{}"'.format(state_list[0])) + query_sql.append('AND state=%s') + sql_params.append(state_list[0]) else: - query_sql.append('AND state IN {}'.format(tuple(state_list))) + query_sql.append('AND state IN %s') + sql_params.append(tuple(state_list)) if data.get('search'): # 模糊搜索只包含以下维度:job_id, job_name, 创建人名字,job类型 search = data.get('search') @@ -152,27 +159,49 @@ class JobTestService(CommonService): user_ids = [user['id'] for user in users] job_types = JobType.objects.filter(name=search).values('id') job_type_ids = [job_type['id'] for job_type in job_types] - search_sql = 'name LIKE "%{0}%" OR id like "%{0}%" '.format(search) + search_sql = 'name LIKE %s OR id like %s ' + sql_params.append('%' + search + '%') + sql_params.append('%' + search + '%') if user_ids: - search_sql += 'OR creator IN ({}) '.format(','.join(str(user_id) for user_id in user_ids)) + search_sql += 'OR creator IN %s ' + sql_params.append(tuple([user_id for user_id in user_ids])) if job_type_ids: - search_sql += 'OR job_type_id IN ({})'.format(','.join(str(type_id) for type_id in job_type_ids)) + search_sql += 'OR job_type_id IN %s' + sql_params.append(tuple([type_id for type_id in job_type_ids])) query_sql.append(f'AND ({search_sql})') if data.get('test_suite'): test_suite = json.loads(data.get('test_suite')) - test_suites = TestJobSuite.objects.filter(test_suite_id__in=test_suite).values('job_id') - job_ids = [test_suite['job_id'] for test_suite in test_suites] + test_suite_id_list = TestSuite.objects.filter(name__in=test_suite, query_scope='all'). \ + values_list('id', flat=True) + if test_suite_id_list and TestJobSuite.objects.filter(test_suite_id__in=test_suite_id_list).exists(): + job_ids = TestJobSuite.objects.filter(test_suite_id__in=test_suite_id_list). 
\ + values_list('job_id', flat=True) + query_sql.append('AND id IN %s') + sql_params.append(tuple(job_ids)) + else: + query_sql.append('AND id=0') + if data.get('test_conf'): + test_conf = data.get('test_conf') + test_case_ids = TestCase.objects.filter(name=test_conf).values('id').distinct() + job_ids = list() + if len(test_case_ids) > 0: + test_cases = TestJobCase.objects.filter(test_case_id__in=test_case_ids).values('job_id').distinct() + job_ids = [test_case['job_id'] for test_case in test_cases] if job_ids: - query_sql.append('AND id IN ({})'.format(','.join(str(job_id) for job_id in job_ids))) + query_sql.append('AND id IN %s') + sql_params.append(tuple([job_id for job_id in job_ids])) else: query_sql.append('AND id=0') if data.get('server'): server = data.get('server') - server_objs = TestServerSnapshot.objects.filter(ip=server) - cloud_server_objs = CloudServerSnapshot.objects.filter(private_ip=server) - id_li = list(set([obj.job_id for obj in server_objs]) | set([obj.job_id for obj in cloud_server_objs])) + server_objs = TestServerSnapshot.objects.filter(Q(ip=server) | Q(sn=server)) + cloud_server_objs = CloudServerSnapshot.objects.filter(Q(private_ip=server) | Q(sn=server)) + job_case = TestJobCase.objects.filter(server_ip=server) + id_li = list(set([obj.job_id for obj in server_objs]) | set([obj.job_id for obj in cloud_server_objs]) | + set([obj.job_id for obj in job_case])) if id_li: - query_sql.append('AND id IN ({})'.format(','.join(str(job_id) for job_id in id_li))) + query_sql.append('AND id IN %s') + sql_params.append(tuple(job_id for job_id in id_li)) else: query_sql.append('AND id=0') if data.get('tags'): @@ -180,7 +209,8 @@ class JobTestService(CommonService): job_tags = JobTagRelation.objects.filter(tag_id__in=tags).values('job_id') job_ids = [job_tag['job_id'] for job_tag in job_tags] if job_ids: - query_sql.append('AND id IN ({})'.format(','.join(str(job_id) for job_id in job_ids))) + query_sql.append('AND id IN %s') + sql_params.append(tuple(job_id for job_id in job_ids)) else: query_sql.append('AND id=0') if data.get('fail_case'): @@ -188,95 +218,87 @@ class JobTestService(CommonService): fail_cases = FuncResult.objects.filter(sub_case_name__in=fail_case, sub_case_result=2) job_ids = [fail_case.test_job_id for fail_case in fail_cases] if job_ids: - query_sql.append('AND id IN ({})'.format(','.join(str(job_id) for job_id in job_ids))) - else: - query_sql.append('AND id=0') - if data.get('test_conf'): - test_conf = data.get('test_conf') - test_case_ids = TestCase.objects.filter(name=test_conf).values('id').distinct() - job_ids = list() - if len(test_case_ids) > 0: - test_cases = TestJobCase.objects.filter(test_case_id__in=test_case_ids).values('job_id').distinct() - job_ids = [test_case['job_id'] for test_case in test_cases] - if job_ids: - query_sql.append('AND id IN ({})').format(','.join(str(job_id) for job_id in job_ids)) + query_sql.append('AND id IN %s') + sql_params.append(tuple(job_id for job_id in job_ids)) else: query_sql.append('AND id=0') if data.get('creation_time'): creation_time = data.get('creation_time') creation_time = json.loads(creation_time) start_time, end_time = self.check_time_fmt(creation_time) - query_sql.append('AND start_time BETWEEN "{}" AND "{}"'.format(start_time, end_time)) + query_sql.append('AND start_time BETWEEN %s AND %s') + sql_params.append(start_time) + sql_params.append(end_time) if data.get('completion_time'): completion_time = data.get('completion_time') completion_time = json.loads(completion_time) start_time, end_time = 
self.check_time_fmt(completion_time) end_state = tuple(['stop', 'fail', 'success']) query_sql.append( - 'AND end_time BETWEEN "{}" AND "{}" AND state IN {}'.format(start_time, end_time, end_state)) + 'AND end_time BETWEEN %s AND %s AND state IN %s'.format(start_time, end_time, end_state)) + sql_params.append(start_time) + sql_params.append(end_time) + sql_params.append(end_state) if data.get('filter_id'): - query_sql.append('AND NOT id IN ({})'.format(data.get('filter_id'))) + query_sql.append('AND NOT id IN (%s)') + sql_params.append(data.get('filter_id')) if data.get('creators'): creators = json.loads(data.get('creators')) - query_sql.append('AND creator IN ({})'.format(','.join(str(creator) for creator in creators))) + query_sql.append('AND creator IN %s') + sql_params.append(tuple(creator for creator in creators)) if data.get('test_type') or pass_test_type: test_type = data.get('test_type') if data.get('test_type') else pass_test_type - query_sql.append('AND test_type="{}"'.format(test_type)) + query_sql.append('AND test_type=%s') + sql_params.append(test_type) filter_fields = ['project_id', 'job_type_id', 'product_id', 'server_provider', 'product_version', 'ws_id'] for filter_field in filter_fields: if data.get(filter_field): - query_sql.append('AND {}="{}"'.format(filter_field, data.get(filter_field))) + query_sql.append('AND {}=%s'.format(filter_field)) + sql_params.append(data.get(filter_field).strip()) extend_sql = ' '.join(query_sql) func_view_config = BaseConfig.objects.filter(config_type='ws', ws_id=data.get('ws_id'), config_key='FUNC_RESULT_VIEW_TYPE').first() - with connection.cursor() as cursor: - search_sql = """ - SELECT - A.id, - A.name, - A.ws_id, - A.state, - A.state_desc, - A.test_type, - A.test_result, - A.project_id, - A.product_id, - A.creator, - A.callback_api, - A.start_time, - A.end_time, - A.gmt_created, - A.report_name, - A.report_template_id, - A.server_provider, - A.product_version, - A.created_from, A.baseline_id - FROM test_job A - RIGHT JOIN ( - SELECT id FROM test_job - WHERE is_deleted=0 and ws_id='{}' {} ORDER BY id DESC LIMIT {}, {}) B - ON A.id=B.id ORDER BY B.id DESC - """.format(data.get('ws_id'), extend_sql, (page_num - 1) * page_size, page_size) - - cursor.execute(search_sql) - rows = cursor.fetchall() - job_id_list = [row_data[0] for row_data in rows] - test_server_shot = TestServerSnapshot.objects.filter(job_id__in=job_id_list) - clould_server_shot = CloudServerSnapshot.objects.filter(job_id__in=job_id_list) - fun_result = FuncResult.objects.filter(test_job_id__in=job_id_list) - test_job_case = TestJobCase.objects.filter(job_id__in=job_id_list) - report_obj = ReportObjectRelation.objects.filter(object_id__in=job_id_list) - for row_data in rows: - self._get_test_res(collect_job_set, create_name_map, func_view_config, product_name_map, - project_name_map, res, row_data, test_type_map) - total = 0 - query_total = """ - SELECT COUNT(id) FROM test_job WHERE is_deleted=0 {} ORDER BY id DESC""".format(extend_sql) - cursor.execute(query_total) - rows = cursor.fetchall() - if rows: - total = rows[0][0] + search_sql = """ + SELECT + A.id, + A.name, + A.ws_id, + A.state, + A.state_desc, + A.test_type, + A.test_result, + A.project_id, + A.product_id, + A.creator, + A.callback_api, + A.start_time, + A.end_time, + A.gmt_created, + A.report_name, + A.report_template_id, + A.server_provider, + A.product_version, + A.created_from, + A.baseline_id + FROM test_job A + RIGHT JOIN ( + SELECT id FROM test_job + WHERE is_deleted=0 {} ORDER BY id DESC LIMIT {}, {}) B + 
ON A.id=B.id ORDER BY B.id DESC + """.format(extend_sql, (page_num - 1) * page_size, page_size) + if not data.get('ws_id'): + search_sql = search_sql.replace("and ws_id='None'", '') + rows = execute_sql(search_sql, sql_params) + for row_data in rows: + self._get_test_res(collect_job_set, create_name_map, func_view_config, + product_name_map, project_name_map, res, row_data, test_type_map) + total = 0 + query_total = """ + SELECT COUNT(id) FROM test_job WHERE is_deleted=0 {} ORDER BY id DESC""".format(extend_sql) + rows = execute_sql(query_total, sql_params) + if rows: + total = rows[0][0] return res, total def _get_test_res(self, collect_job_set, create_name_map, func_view_config, product_name_map, project_name_map, diff --git a/tone/services/plan/complete_plan_report.py b/tone/services/plan/complete_plan_report.py index d182242e18b887b75118bc981b52ffc3fc69fc82..c249c490704f0b99c23a1862ee7f6c9801a0ff57 100644 --- a/tone/services/plan/complete_plan_report.py +++ b/tone/services/plan/complete_plan_report.py @@ -633,7 +633,6 @@ def handle_no_group(plan_instance, plan_inst_id): # noqa: C901 test_env = CompareEnvInfoService().get_env_info(base_group, compare_groups) # 模板名称 ws_id = plan_instance.ws_id - job_li = func_job_list report_source = 'plan' default_tmpl_id = ReportTemplate.objects.filter(ws_id=ws_id, name='默认模板', query_scope='all').first().id name = plan_instance.report_name diff --git a/tone/services/report/report_services.py b/tone/services/report/report_services.py index ae627e91e457556801a74e169273dbea97fd039f..436e208a07d63912a67d1eebdecc78bca1a6b598 100644 --- a/tone/services/report/report_services.py +++ b/tone/services/report/report_services.py @@ -634,6 +634,11 @@ class ReportService(CommonService): report_item.desc = desc report_item.save() + def get_report_product_version(self, data): + ws_id = data.get('ws_id') + return Report.objects.filter(ws_id=ws_id).exclude(product_version='').\ + values_list('product_version', flat=True).distinct() + def save_report_detail_suite(report_id, test_suite_id, data): report_detail = ReportDetail.objects.filter(report_id=report_id).first() diff --git a/tone/services/sys/baseline_services.py b/tone/services/sys/baseline_services.py index 2f7514b7665662b7caffe152d72a7f5c9ffe649c..5ef833e48a32c49f12247e9ade05765976b3cb02 100644 --- a/tone/services/sys/baseline_services.py +++ b/tone/services/sys/baseline_services.py @@ -326,7 +326,7 @@ class BaselineUploadService(CommonService): except tarfile.ReadError: code = ErrorCode.UPLOAD_FILE_FORMAT.code msg = ErrorCode.UPLOAD_FILE_FORMAT.to_api - except Exception as ex: + except Exception: code = 201 msg = '' return code, msg, error_list diff --git a/tone/services/sys/chats_services.py b/tone/services/sys/chats_services.py index ce6e25ecb99387e2db8746e36d487c6132ecdd05..9e01ef047db7ea1848453e8d99b42f17cb7bc81f 100644 --- a/tone/services/sys/chats_services.py +++ b/tone/services/sys/chats_services.py @@ -211,7 +211,8 @@ class ChatsAnswerInfoService(CommonService): enable = data.get("enable") right_number = data.get("right_number") ChatsAnswer.objects.filter(id=answer_id).update(reason=reason, answer=answer, problem_type=problem_type, - problem_attribution=problem_attribution, enable=enable, right_number=right_number) + problem_attribution=problem_attribution, enable=enable, + right_number=right_number) return True, None @staticmethod @@ -483,7 +484,8 @@ class ChatsCheckInfoService(CommonService): } all_question_res.append(cur_question_res) if operator: - ChatsCollectInfoService.create({"contents": 
problem_desc, "contents_sources": ContentEnums.PROBLEM_SOURCES_CHOICES[0][0]}, operator, all_question_res) + ChatsCollectInfoService.create({"contents": problem_desc, "contents_sources": + ContentEnums.PROBLEM_SOURCES_CHOICES[0][0]}, operator, all_question_res) return True, all_question_res ''' diff --git a/tone/services/sys/testcase_services.py b/tone/services/sys/testcase_services.py index 139db675daa3cd63f8db6854c25b261f2228f9de..b69deef2ebdfce6205384de0dc96903db72671ce 100644 --- a/tone/services/sys/testcase_services.py +++ b/tone/services/sys/testcase_services.py @@ -289,7 +289,7 @@ class TestSuiteService(CommonService): view_type_list = data.get('view_type').split(',') q &= Q(view_type__in=view_type_list) if data.get('scope'): - suite_id_list = TestCase.objects.all().values_list('test_suite_id', flat=True) + suite_id_list = TestCase.objects.all().values_list('test_suite_id', flat=True).distinct() q &= Q(id__in=suite_id_list) return q diff --git a/tone/services/sys/workspace_services.py b/tone/services/sys/workspace_services.py index 3965383ac94bf3c02e7a5fb19472a6ccdccce3ba..e001bf88527070b7da01eb204773c6e178671083 100644 --- a/tone/services/sys/workspace_services.py +++ b/tone/services/sys/workspace_services.py @@ -774,8 +774,8 @@ class WorkspaceSelectService(CommonService): # 按照config_list_ws_id里面的顺序排列 if config_list_ws_id: preserved = Case(*[When(id=pk, then=pos) for pos, pk in enumerate(config_list_ws_id)]) - queryset = Workspace.objects.filter(Q(Q(id__in=config_list_ws_id) | Q(is_common=True)) & - Q(is_approved=True)).order_by(preserved) + queryset = Workspace.objects.filter(Q(Q(id__in=config_list_ws_id) | Q(is_common=True)) + & Q(is_approved=True)).order_by(preserved) return queryset else: queryset = Workspace.objects.filter(is_common=True) @@ -831,8 +831,8 @@ class AllWorkspaceService(CommonService): if config_list_ws_id: preserved = Case(*[When(id=pk, then=pos) for pos, pk in enumerate(config_list_ws_id)]) # 查找出id在config_list_ws_id或通用的ws,并且审核通过的ws,根据preserved排序,定义为queryset_need_update - queryset_need_update = Workspace.objects.filter(Q(Q(id__in=config_list_ws_id) | Q(is_common=True)) & - Q(is_approved=True)).order_by(preserved) + queryset_need_update = Workspace.objects.filter(Q(Q(id__in=config_list_ws_id) | Q(is_common=True)) + & Q(is_approved=True)).order_by(preserved) # 查找出不属于queryset_update并且非通用的ws,定义为queryset_no_need_update queryset_dont_need_update = Workspace.objects.filter(is_approved=True, is_common=False).exclude(id__in=queryset_need_update) diff --git a/tone/urls/report_urls.py b/tone/urls/report_urls.py index 6084bfbf5daf07213f8744c27f70e6e6396fc8ce..c8b35e8c1fd132b1a107d84f319b263b91055b0c 100644 --- a/tone/urls/report_urls.py +++ b/tone/urls/report_urls.py @@ -20,5 +20,6 @@ urlpatterns = [ path('get_by_plan//', report_views.ReportDailyView.as_view(), name='get_by_plan'), path('test/report/item_suite/', report_views.ReportItemSuiteView.as_view(), name='report_item_suite'), path('test/report/update_desc/', report_views.ReportDescView.as_view(), name='report_update_desc'), -path('test/report/item/update_desc/', report_views.ReportItemDescView.as_view(), name='report_item_desc'), + path('test/report/item/update_desc/', report_views.ReportItemDescView.as_view(), name='report_item_desc'), + path('test/report/product/version/', report_views.ReportProductVersionView.as_view(), name='get_product_version'), ] diff --git a/tone/views/api/create_job.py b/tone/views/api/create_job.py index da825be1ddceacb67304394398ebfcbce18eb95a..ca9cb4f3de02ea081a81b95f7ea0588653c45219 
100644 --- a/tone/views/api/create_job.py +++ b/tone/views/api/create_job.py @@ -21,6 +21,7 @@ from tone.core.common.expection_handler.error_code import ErrorCode from tone.core.common.expection_handler.error_catch import api_catch_error from tone.core.utils.permission_manage import check_ws_operator_permission from tone.serializers.job.test_serializers import JobSerializerForAPI +from tone.core.common.job_result_helper import splice_job_link @api_catch_error @@ -54,6 +55,7 @@ def job_create(request): job_data['job_id'] = test_job.id job_data['job_name'] = test_job.name job_data['test_type'] = test_job.test_type + job_data['job_link'] = splice_job_link(test_job) resp.data = job_data return resp.json_resp() @@ -183,11 +185,11 @@ def conversion_data(data): # noqa: C901 else: kernel_info['kernel_packages'] = [] if kernel_info_obj.kernel_link: - kernel_info['kernel_packages'].append(kernel_info_obj.kernel_link) + kernel_info['kernel_packages'].append(kernel_info_obj.kernel_link.strip()) if kernel_info_obj.devel_link: - kernel_info['kernel_packages'].append(kernel_info_obj.devel_link) + kernel_info['kernel_packages'].append(kernel_info_obj.devel_link.strip()) if kernel_info_obj.headers_link: - kernel_info['kernel_packages'].append(kernel_info_obj.headers_link) + kernel_info['kernel_packages'].append(kernel_info_obj.headers_link.strip()) data['kernel_info'] = kernel_info else: data['kernel_info'] = kernel_info_format(data.get('kernel_info', dict())) diff --git a/tone/views/api/query_job.py b/tone/views/api/query_job.py index 97c20669342fad920021f7a769f5c6136c23717a..c6c01c1ca9b9fc69085dcc4dd65f3f325065775e 100644 --- a/tone/views/api/query_job.py +++ b/tone/views/api/query_job.py @@ -14,7 +14,8 @@ from tone.core.utils.common_utils import query_all_dict from tone import settings from tone.core.common.constant import FUNC_CASE_RESULT_TYPE_MAP, PERFORMANCE from tone.models import TestJob, TestJobCase, TestSuite, TestCase, PerfResult, FuncResult, JobType, Project, \ - Workspace, ResultFile, TestCluster, TestClusterServer, CloudServer, TestStep, Product, BatchJobRelation + Workspace, ResultFile, TestCluster, TestClusterServer, CloudServer, TestStep, Product, BatchJobRelation, \ + WorkspaceMember, User from tone.core.utils.helper import CommResp from tone.core.common.expection_handler.error_code import ErrorCode from tone.core.common.expection_handler.error_catch import api_catch_error @@ -187,8 +188,8 @@ def get_job_count(job_id, baseline_id, test_type): job_case_dict = dict() for job_case_count in job_case_list: key = str(job_case_count['test_suite_id']) + '_' + str(job_case_count['test_case_id']) - na = job_case_count['total'] - job_case_count['increase'] - job_case_count['decline'] - \ - job_case_count['normal'] - job_case_count['invalid'] + na = (job_case_count['total'] - job_case_count['increase'] - job_case_count['decline']) - \ + (job_case_count['normal'] + job_case_count['invalid']) job_case_dict[key] = dict( { 'count': job_case_count['total'], @@ -343,11 +344,17 @@ def get_project(request): @api_catch_error @token_required def get_workspace(request): - if not check_admin_operator_permission(request.GET.get('username', None)): - assert None, ValueError(ErrorCode.PERMISSION_ERROR) + ws_list = list() resp = CommResp() - queryset = Workspace.objects.all() - ws_list = [{'id': ws.id, 'name': ws.name} for ws in queryset] + if check_admin_operator_permission(request.GET.get('username', None)): + queryset = Workspace.objects.filter(is_approved=True) + ws_list = [{'id': ws.id, 'name': ws.name} for ws
in queryset] + else: + user = User.objects.filter(username=request.GET.get('username', None)).first() + if user: + ws_id_list = WorkspaceMember.objects.filter(user_id=user.id).values_list('ws_id', flat=True).distinct() + queryset = Workspace.objects.filter(is_approved=True, id__in=ws_id_list) + ws_list = [{'id': ws.id, 'name': ws.name} for ws in queryset] resp.data = ws_list return resp.json_resp() @@ -556,8 +563,8 @@ def job_log_query(request): def get_job_case_logs(result_dict, job_case, job_log_files): - result_files = [log for log in job_log_files if log[0] == job_case.test_suite_id and - log[1] == job_case.test_case_id] + result_files = [log for log in job_log_files + if log[0] == job_case.test_suite_id and log[1] == job_case.test_case_id] key = str(job_case.test_suite_id) + '_' + str(job_case.test_case_id) result_dict[key] = list() for result_file in result_files: diff --git a/tone/views/job/report_views.py b/tone/views/job/report_views.py index e1be5c93dd76d16cddbc12b450e455e02da3eaca..13caaac3b505f354ecb41c68e4e333df8dd42cc1 100644 --- a/tone/views/job/report_views.py +++ b/tone/views/job/report_views.py @@ -215,3 +215,19 @@ class ReportItemDescView(CommonAPIView): """ self.service.update_report_item_desc(request.data) return Response(self.get_response_code()) + + +class ReportProductVersionView(CommonAPIView): + serializer_class = ReportSerializer + queryset = Report.objects.all() + service_class = ReportService + permission_classes = [] + + @method_decorator(views_catch_error) + def get(self, request): + """ + 获取报告产品版本列表 + """ + res_list = self.service.get_report_product_version(request.GET) + response_data = self.get_response_only_for_data(res_list) + return Response(response_data) diff --git a/tone/views/portal/sync_portal_views.py b/tone/views/portal/sync_portal_views.py index dcac5e41f101b9b1897063873d2c401e0da53742..5cb8989cd22d86cf08dd10235387696dcc1e9d4c 100644 --- a/tone/views/portal/sync_portal_views.py +++ b/tone/views/portal/sync_portal_views.py @@ -55,4 +55,4 @@ class SyncPortalFuncView(BaseView): response_data = self.get_response_code() response_data['code'] = code response_data['msg'] = msg - return Response(response_data) \ No newline at end of file + return Response(response_data) diff --git a/tone/views/sys/product_views.py b/tone/views/sys/product_views.py index 1868e71a6fa3ce6c46071023c6e9c86db33b86be..2e198ab40df88d1ceca673288c4d787d880238c9 100644 --- a/tone/views/sys/product_views.py +++ b/tone/views/sys/product_views.py @@ -303,4 +303,4 @@ class ProjectDragView(CommonAPIView): response_data = self.get_response_data(instances) else: response_data = self.get_response_code(code=201, msg=instances) - return Response(response_data) \ No newline at end of file + return Response(response_data)
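
Note on the job-list query changes in tone/services/job/test_services.py: the filters now emit SQL fragments that contain only %s placeholders and collect the actual values in sql_params, instead of splicing user input into the SQL text with str.format(). Below is a minimal, illustrative sketch of that pattern; fetch_jobs and its columns are invented names, not part of this patch, and it assumes the project's MySQL backend (mysqlclient), whose parameter binding expands a tuple into a parenthesized value list for IN clauses.

# Minimal sketch of the parameterized-query pattern adopted above.
# Assumes Django with the mysqlclient backend; fetch_jobs is illustrative only.
from django.db import connection

def fetch_jobs(ws_id, name_like=None, job_ids=None):
    query_sql = ['AND ws_id=%s']      # SQL fragments carry only %s placeholders
    sql_params = [ws_id]              # values are passed separately to the driver
    if name_like:
        # escape LIKE wildcards, as the patch does for the name filter
        query_sql.append('AND name LIKE %s')
        sql_params.append('%' + name_like.replace('_', r'\_').replace('%', r'\%') + '%')
    if job_ids:
        # mysqlclient renders a tuple parameter as "(1, 2, 3)", so IN %s works;
        # an empty tuple would be invalid SQL, hence the guard
        query_sql.append('AND id IN %s')
        sql_params.append(tuple(job_ids))
    # only placeholder fragments are spliced into the skeleton, never user input
    sql = 'SELECT id, name FROM test_job WHERE is_deleted=0 {}'.format(' '.join(query_sql))
    with connection.cursor() as cursor:
        cursor.execute(sql, sql_params)   # the driver quotes/escapes every bound value
        return cursor.fetchall()

Keeping values out of the SQL text and letting cursor.execute() bind them is what closes the injection path left open by the old 'AND creator="{}"'.format(...) style fragments.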