Skip to content

Commit

Permalink
Optimized test and updated instructions. Support for saving and loading models to file.
Browse files Browse the repository at this point in the history
  • Loading branch information
Knucklessg1 committed Aug 22, 2024
1 parent d9f5f18 commit 4761b43
Show file tree
Hide file tree
Showing 4 changed files with 193 additions and 85 deletions.
150 changes: 105 additions & 45 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,12 @@ Includes a large portion of useful API calls to GitLab and SQLAlchemy Models to

This repository is actively maintained - Contributions are welcome!

Additional Features:
- All responses are returned as native Pydantic models
- Save Pydantic models to pickle files locally
- Easily convert Pydantic to SQLAlchemy models for quick database insertion


### API Calls:
- Branches
- Commits
Expand All @@ -47,6 +53,7 @@ This repository is actively maintained - Contributions are welcome!
- Users
- Wiki


<details>
<summary><b>Usage:</b></summary>

Expand All @@ -56,8 +63,10 @@ Using the API directly
#!/usr/bin/python

import gitlab_api
from gitlab_api.utils import pydantic_to_sqlalchemy, upsert
from gitlab_api.gitlab_db_models import BaseDBModel as Base
from gitlab_api import pydantic_to_sqlalchemy, upsert, save_model, load_model
from gitlab_api.gitlab_db_models import (
BaseDBModel as Base,
)
import urllib3
import os
from urllib.parse import quote_plus
Expand All @@ -84,95 +93,146 @@ if __name__ == "__main__":
)
print("GitLab Client Created\n\n")

print("Creating Engine")
engine = create_engine(
f"postgresql://{postgres_username}:{quote_plus(postgres_password)}@"
f"{postgres_db_host}:{postgres_port}/{postgres_db_name}"
)
print("Engine Created\n\n")

print("Creating Tables...")
Base.metadata.create_all(engine)
print("Tables Created\n\n")

print("Creating Session...")
Session = sessionmaker(bind=engine)
session = Session()
print("Session Created\n\n")

print("Fetching GitLab Data...")
# User Data table is a dependency table
user_response = client.get_users()
user_db_model = pydantic_to_sqlalchemy(schema=user_response.data)
print("\nFetching User Data...")
user_response = client.get_users(active=True, humans=True)
print(
f"Users ({len(user_response.data.users)}) Fetched - "
f"Users ({len(user_response.data)}) Fetched - "
f"Status: {user_response.status_code}\n"
)

# Namespaces table is a dependency table
print("\nFetching Namespace Data...")
namespace_response = client.get_namespaces()
namespace_db_model = pydantic_to_sqlalchemy(schema=namespace_response.data)
print(
f"Namespaces ({len(namespace_response.data.namespaces)}) Fetched - "
f"Namespaces ({len(namespace_response.data)}) Fetched - "
f"Status: {namespace_response.status_code}\n"
)

# Project table requires Users and Namespaces
print("\nFetching Project Data...")
project_response = client.get_nested_projects_by_group(group_id=2, per_page=100)
project_db_model = pydantic_to_sqlalchemy(schema=project_response.data)
print(
f"Projects ({len(project_response.data.projects)}) Fetched - "
f"Projects ({len(project_response.data)}) Fetched - "
f"Status: {project_response.status_code}\n"
)

# Merge Requests table requires Users, Namespaces, and Projects
print("\nFetching Merge Request Data...")
merge_request_response = client.get_group_merge_requests(
argument="state=all", group_id=2
)
merge_request_db_model = pydantic_to_sqlalchemy(schema=merge_request_response.data)

print(
f"Merge Requests ({len(merge_request_response.data.merge_requests)}) Fetched - "
f"\nMerge Requests ({len(merge_request_response.data)}) Fetched - "
f"Status: {merge_request_response.status_code}\n"
)

# Pipeline Jobs table
pipeline_job_response = None
for project in project_response.data.projects:
for project in project_response.data:
job_response = client.get_project_jobs(project_id=project.id)
if not pipeline_job_response and hasattr(job_response, "data") and hasattr(job_response.data, "jobs") and len(job_response.data.jobs)>0:
if (
not pipeline_job_response
and hasattr(job_response, "data")
and len(job_response.data) > 0
):
pipeline_job_response = job_response
elif pipeline_job_response and hasattr(job_response, "data") and hasattr(job_response.data, "jobs") and len(job_response.data.jobs)>0:
pipeline_job_response.data.jobs.extend(job_response.data.jobs)
elif (
pipeline_job_response
and hasattr(job_response, "data")
and len(job_response.data) > 0
):
pipeline_job_response.data.extend(job_response.data)
print(
f"Pipeline Jobs ({len(getattr(pipeline_job_response.data, "jobs", []))}) Fetched for Project ({project.id}) - "
f"Pipeline Jobs ({len(getattr(pipeline_job_response, 'data', []))}) "
f"Fetched for Project ({project.id}) - "
f"Status: {pipeline_job_response.status_code}\n"
)

pipeline_db_model = pydantic_to_sqlalchemy(schema=pipeline_job_response.data)
print("Saving Pydantic Models...")
user_file = save_model(model=user_response, file_name="user_model", file_path=".")
namespace_file = save_model(
model=namespace_response, file_name="namespace_model", file_path="."
)
project_file = save_model(
model=project_response, file_name="project_model", file_path="."
)
merge_request_file = save_model(
model=merge_request_response, file_name="merge_request_model", file_path="."
)
pipeline_job_file = save_model(
model=pipeline_job_response, file_name="pipeline_job_model", file_path="."
)
print("Models Saved")

print("Loading Pydantic Models...")
user_response = load_model(file=user_file)
namespace_response = load_model(file=namespace_file)
project_response = load_model(file=project_file)
merge_request_response = load_model(file=merge_request_file)
pipeline_job_response = load_model(file=pipeline_job_file)
print("Models Loaded")

print("Converting Pydantic to SQLAlchemy model...")
user_db_model = pydantic_to_sqlalchemy(schema=user_response)
print(f"Database Models: {user_db_model}\n")

print("Inserting Users Into Database...")
print("Converting Pydantic to SQLAlchemy model...")
namespace_db_model = pydantic_to_sqlalchemy(schema=namespace_response)
print(f"Database Models: {namespace_db_model}\n")

print("Converting Pydantic to SQLAlchemy model...")
project_db_model = pydantic_to_sqlalchemy(schema=project_response)
print(f"Database Models: {project_db_model}\n")

print("Converting Pydantic to SQLAlchemy model...")
merge_request_db_model = pydantic_to_sqlalchemy(schema=merge_request_response)
print(f"Database Models: {merge_request_db_model}\n")

print("Converting Pydantic to SQLAlchemy model...")
pipeline_db_model = pydantic_to_sqlalchemy(schema=pipeline_job_response)
print(f"Database Models: {pipeline_db_model}\n")

print("Creating Engine")
engine = create_engine(
f"postgresql://{postgres_username}:{quote_plus(postgres_password)}@"
f"{postgres_db_host}:{postgres_port}/{postgres_db_name}"
)
print("Engine Created\n\n")

print("Creating Tables...")
Base.metadata.create_all(engine)
print("Tables Created\n\n")

print("Creating Session...")
Session = sessionmaker(bind=engine)
session = Session()
print("Session Created\n\n")

print(f"Inserting ({len(user_response.data)}) Users Into Database...")
upsert(session=session, model=user_db_model)
print("Users Synchronization Complete!\n")

print("Inserting Namespaces Into Database...")
print(f"Inserting ({len(namespace_response.data)}) Namespaces Into Database...")
upsert(session=session, model=namespace_db_model)
print("Namespaces Synchronization Complete!\n")

print("Inserting Projects Into Database...\n")
print(f"Inserting ({len(project_response.data)}) Projects Into Database...\n")
upsert(session=session, model=project_db_model)
print("Projects Synchronization Complete!\n")

print("Inserting Merge Requests Into Database...")
print(
f"Inserting ({len(merge_request_response.data)}) Merge Requests Into Database..."
)
upsert(session=session, model=merge_request_db_model)
print("Merge Request Synchronization Complete!\n")

print(f"Inserting ({len(pipeline_job_response.data.jobs)}) Pipeline Jobs Into Database...")
print(
f"Inserting ({len(pipeline_job_response.data)}) Pipeline Jobs Into Database..."
)
upsert(session=session, model=pipeline_db_model)
print("Pipeline Jobs Synchronization Complete!\n\n\n")
print("Pipeline Jobs Synchronization Complete!\n")

session.close()
print("Session Closed")


```

</details>
Expand Down
2 changes: 2 additions & 0 deletions gitlab_api/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,8 @@
upsert,
create_table,
pydantic_to_sqlalchemy,
save_model,
load_model,
)

"""
Expand Down
15 changes: 15 additions & 0 deletions gitlab_api/utils.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
#!/usr/bin/python
# coding: utf-8
import logging
import os
import pickle
from typing import Union, Any

from sqlalchemy.engine import reflection
Expand Down Expand Up @@ -145,3 +147,16 @@ def create_table(db_instance, engine):
logging.debug(f"Table {table_name} created.")
else:
logging.debug(f"Table {table_name} already exists.")


def save_model(model: Any, file_name: str = "model", file_path: str = ".") -> str:
    """Serialize a model to a pickle file and return the written file's path.

    Args:
        model: Any picklable object (e.g. a Pydantic response model).
        file_name: Base name for the output file; a ".pkl" suffix is appended.
        file_path: Directory to write into; created if it does not exist.

    Returns:
        The full path of the pickle file that was written.
    """
    # Create the target directory up front so a fresh path doesn't raise
    # FileNotFoundError on open().
    os.makedirs(file_path, exist_ok=True)
    pickle_file = os.path.join(file_path, f"{file_name}.pkl")
    with open(pickle_file, "wb") as file:
        pickle.dump(model, file)
    return pickle_file


def load_model(file: str) -> Any:
    """Deserialize and return a model previously written by ``save_model``.

    Args:
        file: Path to a pickle (.pkl) file.

    Returns:
        The unpickled object.

    WARNING: unpickling executes arbitrary code; only load files you trust.
    """
    with open(file, "rb") as model_file:
        return pickle.load(model_file)
Loading

0 comments on commit 4761b43

Please sign in to comment.