Commit

Merge branch 'master' into paging_special_case_ubuntu
another-rex authored Sep 13, 2024
2 parents c45babf + 09f3aa0 commit a23c8b6
Showing 4 changed files with 43 additions and 78 deletions.
7 changes: 4 additions & 3 deletions docker/importer/importer.py
@@ -463,7 +463,8 @@ def convert_blob_to_vuln(blob: storage.Blob) -> Optional[Tuple[str, str]]:
   if not _is_vulnerability_file(source_repo, blob.name):
     return None
   if not ignore_last_import_time and \
-      not blob.time_created > utc_last_update_date:
+      blob.updated is not None and \
+      not blob.updated > utc_last_update_date:
     return None
 
   logging.info('Bucket entry triggered for %s/%s', source_repo.bucket,
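This hunk switches the importer's freshness check from the blob's creation time to its last-modified time (`blob.updated`), so objects rewritten in place get re-imported, and it guards against `None`: in google-cloud-storage, `updated` is unset on a locally constructed `Blob` until metadata is loaded (via `reload()`, `get_blob()`, or a list operation). A minimal sketch of the same check, assuming a metadata-loaded blob and a timezone-aware cutoff (`is_stale` is a hypothetical helper, not the importer's API):

```python
from datetime import datetime

from google.cloud import storage  # pip install google-cloud-storage


def is_stale(blob: storage.Blob, cutoff: datetime) -> bool:
  """True if the blob was last modified at or before the cutoff.

  blob.updated is None until the blob's metadata has been fetched,
  so unknown-age blobs are treated as fresh, mirroring the guard above.
  """
  return blob.updated is not None and not blob.updated > cutoff
```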
@@ -701,8 +702,8 @@ def _process_updates_rest(self, source_repo: osv.SourceRepository):
         logging.info('Entry does not have an OSV entry: %s', vuln.id)
         continue
       except Exception as e:
-        logging.excecption('Failed to parse %s: error type: %s, details: %s',
-                           vuln.id, e.__class__.__name__, e)
+        logging.exception('Failed to parse %s: error type: %s, details: %s',
+                          vuln.id, e.__class__.__name__, e)
         import_failure_logs.append('Failed to parse vulnerability "' + vuln.id +
                                    '"')
         continue
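The second hunk corrects a misspelled call: `logging.excecption` does not exist, so the handler itself would raise `AttributeError` whenever a record failed to parse. `logging.exception` logs at ERROR level and appends the active traceback, and is meant to be called from inside an `except` block. A short illustration, with a placeholder record ID:

```python
import logging

try:
  raise ValueError('unparseable record')
except Exception as e:
  # Logs at ERROR level and appends the current traceback; a misspelling
  # like logging.excecption would raise AttributeError right here instead.
  logging.exception('Failed to parse %s: error type: %s, details: %s',
                    'OSV-2024-0001', e.__class__.__name__, e)
```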
94 changes: 33 additions & 61 deletions gcp/appengine/frontend3/package-lock.json

Generated file; diff not rendered.

18 changes: 5 additions & 13 deletions tools/datafix/reimport_gcs_record.py
@@ -114,26 +114,19 @@ def bucket_for_source(client: datastore.Client, source: str) -> str:
   return result[0]['bucket']
 
 
-def reset_object_creation(bucket_name: str,
-                          blob_name: str,
-                          tmpdir="/tmp") -> None:
+def reset_object_modification(bucket_name: str, blob_name: str) -> None:
   """Resets a GCS object's creation time.
 
-  Copies the object locally and uploads it again.
+  Makes a no-op patch ("gcloud object storage objects update" equivalent)
 
   Args:
     bucket_name: the name of the GCS bucket.
     blob_name: the name of the object in the bucket.
-    tmpdir: a preexisting directory in the local filesystem to copy the object
-      to/from.
   """
-  local_tmp_file = os.path.join(tmpdir, os.path.basename(blob_name))
   gcs_client = storage.Client()
   bucket = gcs_client.bucket(bucket_name)
   blob = bucket.blob(blob_name)
-  blob.download_to_filename(local_tmp_file)
-  blob.upload_from_filename(local_tmp_file, retry=retry.DEFAULT_RETRY)
-  os.unlink(local_tmp_file)
+  blob.patch(retry=retry.DEFAULT_RETRY)
 
 
 def main() -> None:
@@ -204,11 +197,10 @@ def main() -> None:
         print(f"Skipping {bug['db_id']}, got {e}\n")
         continue
       if args.verbose:
-        print(f"Resetting creation time for {bug_in_gcs['uri']}")
+        print(f"Resetting modification time for {bug_in_gcs['uri']}")
       if not args.dryrun:
         try:
-          reset_object_creation(bug_in_gcs["bucket"], bug_in_gcs["path"],
-                                args.tmpdir)
+          reset_object_modification(bug_in_gcs["bucket"], bug_in_gcs["path"])
         except NotFound as e:
           if args.verbose:
             print(f"Skipping, got {e}\n")
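This rewrite drops the download/re-upload round trip (and the `tmpdir` plumbing that came with it) in favor of a single metadata PATCH: sending the blob's current, unchanged metadata still advances the object's `updated` timestamp, which is the field the importer's new freshness check reads. A minimal sketch of the same idea, with placeholder bucket and object names:

```python
from google.cloud import storage
from google.cloud.storage import retry


def touch_gcs_object(bucket_name: str, blob_name: str) -> None:
  """No-op metadata PATCH that refreshes a GCS object's 'updated' time."""
  client = storage.Client()
  blob = client.bucket(bucket_name).blob(blob_name)
  # Even an empty patch counts as a metadata write, so GCS bumps
  # blob.updated without transferring any object data.
  blob.patch(retry=retry.DEFAULT_RETRY)


# Hypothetical names for illustration only.
touch_gcs_object("example-osv-bucket", "advisories/EXAMPLE-2024-0001.json")
```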
