1515from collections import defaultdict
1616from datetime import datetime , timedelta
1717import fnmatch
18+ import hashlib
1819from hashlib import sha256
1920import json
2021import os
2122from pathlib import Path
2223import sqlalchemy
24+ from sqlalchemy .orm import Session as SA_Session
2325import tempfile
2426import time
2527from typing import Any , Callable , Dict , List , Optional , Set , Tuple , Union , \
4648from ..database .config_db_model import Product
4749from ..database .database import DBSession
4850from ..database .run_db_model import \
49- AnalysisInfo , AnalysisInfoChecker , AnalyzerStatistic , \
51+ AnalysisInfo , AnalysisInfoChecker , AnalysisInfoFile , AnalyzerStatistic , \
5052 BugPathEvent , BugReportPoint , \
5153 Checker , \
5254 ExtendedReportData , \
@@ -814,8 +816,8 @@ def __add_file_content(
814816 self ,
815817 session : DBSession ,
816818 source_file_name : str ,
817- content_hash : Optional [str ]
818- ):
819+ content_hash : Optional [str ] = None
820+ ) -> str :
819821 """
820822 Add the necessary file contents. If content_hash in None then this
821823 function calculates the content hash. Or if it's available at the
@@ -871,6 +873,8 @@ def __add_file_content(
871873 # the meantime.
872874 session .rollback ()
873875
876+ return content_hash
877+
874878 def __store_checker_identifiers (self , checkers : Set [Tuple [str , str ]]):
875879 """
876880 Stores the identifiers "(analyzer, checker_name)" in the database into
@@ -1000,6 +1004,32 @@ def __store_analysis_statistics(
10001004
10011005 session .add (analyzer_statistics )
10021006
def __store_analysis_info_files(
    self,
    session: SA_Session,
    analysis_info_id: int,
    report_dir_path: str
):
    """ Store analyzer related config files (e.g. skipfile) """
    # The uploader extracts each report directory's "conf" files into a
    # ZIP sub-directory named by the MD5 digest of the original path.
    conf_dir_path = os.path.join(report_dir_path, "conf")
    path_digest = hashlib.md5(conf_dir_path.encode('utf-8')).hexdigest()
    zip_conf_dir = os.path.join(self._zip_dir, "reports", path_digest)

    # Nothing to do when no config files were uploaded for this run.
    if not os.path.isdir(zip_conf_dir):
        return

    for entry in os.listdir(os.fsencode(zip_conf_dir)):
        conf_file = os.path.join(zip_conf_dir, os.fsdecode(entry))
        # Persist the file's contents (deduplicated by hash) and obtain
        # its content hash for the association row below.
        content_hash = self.__add_file_content(session, conf_file)

        # Only link the file to this AnalysisInfo once; the lookup keys
        # on (analysis_info_id, content_hash) — presumably the table's
        # composite primary key, confirm against the ORM model.
        existing = session.get(
            AnalysisInfoFile, (analysis_info_id, content_hash))
        if existing is None:
            session.add(AnalysisInfoFile(
                analysis_info_id=analysis_info_id,
                filename=os.path.basename(conf_file),
                content_hash=content_hash))
10031033 def __store_analysis_info (
10041034 self ,
10051035 session : DBSession ,
@@ -1012,37 +1042,30 @@ def __store_analysis_info(
10121042 analyzer_command .encode ("utf-8" ),
10131043 zlib .Z_BEST_COMPRESSION )
10141044
1015- analysis_info_rows = session \
1016- .query (AnalysisInfo ) \
1017- .filter (AnalysisInfo .analyzer_command == cmd ) \
1018- .all ()
1019-
1020- if analysis_info_rows :
1021- # It is possible when multiple runs are stored
1022- # simultaneously to the server with the same analysis
1023- # command that multiple entries are stored into the
1024- # database. In this case we will select the first one.
1025- analysis_info = analysis_info_rows [0 ]
1026- else :
1027- analysis_info = AnalysisInfo (analyzer_command = cmd )
1028-
1029- # Obtain the ID eagerly to be able to use the M-to-N table.
1030- session .add (analysis_info )
1031- session .flush ()
1032- session .refresh (analysis_info , ["id" ])
1033-
1034- for analyzer in mip .analyzers :
1035- q = session \
1036- .query (Checker ) \
1037- .filter (Checker .analyzer_name == analyzer )
1038- db_checkers = {r .checker_name : r for r in q .all ()}
1039-
1040- connection_rows = [AnalysisInfoChecker (
1041- analysis_info , db_checkers [chk ], is_enabled )
1042- for chk , is_enabled
1043- in mip .checkers .get (analyzer , {}).items ()]
1044- for r in connection_rows :
1045- session .add (r )
1045+ analysis_info = AnalysisInfo (analyzer_command = cmd )
1046+
1047+ # Obtain the ID eagerly to be able to use the M-to-N table.
1048+ session .add (analysis_info )
1049+ session .flush ()
1050+ session .refresh (analysis_info , ["id" ])
1051+
1052+ for analyzer in mip .analyzers :
1053+ q = session \
1054+ .query (Checker ) \
1055+ .filter (Checker .analyzer_name == analyzer )
1056+ db_checkers = {r .checker_name : r for r in q .all ()}
1057+
1058+ connection_rows = [AnalysisInfoChecker (
1059+ analysis_info , db_checkers [chk ], is_enabled )
1060+ for chk , is_enabled
1061+ in mip .checkers .get (analyzer , {}).items ()]
1062+ for r in connection_rows :
1063+ session .add (r )
1064+
1065+ if mip .report_dir_path :
1066+ self .__store_analysis_info_files (session ,
1067+ analysis_info .id ,
1068+ mip .report_dir_path )
10461069
10471070 run_history .analysis_info .append (analysis_info )
10481071 self .__analysis_info [src_dir_path ] = analysis_info
0 commit comments