PyXMake Developer Guide 1.0
PyXMake
Loading...
Searching...
No Matches
gitlab.py
1# -*- coding: utf-8 -*-
2# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
3# % PyXMake - Build environment for PyXMake %
4# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
5"""
6Triple-use minimum working example for PyXMake.
7Technically, this script runs w/o PyXMake, but the default pipeline refers to the project.
8
9@note: Execute a GitLab pipeline or a given pipeline job remotely w/o non-default packages.
10
11@version: 1.0
12----------------------------------------------------------------------------------------------
13@requires:
14 - GitLab X-API-Token
15
16@date:
17 - 12.01.2021
18
19@author: garb_ma [DLR-FA,STM Braunschweig]
20----------------------------------------------------------------------------------------------
21"""
22import time
23import sys
24import os
25import re
26import copy
27import getpass
28import posixpath
29
30try:
31 import PyXMake as _ #@UnusedImport
32except ImportError:
33 # Script is executed as a plug-in
34 sys.path.insert(0,os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
35finally:
36 from PyXMake.Tools import Utility
37 from PyXMake.Plugin.__gitlab import main, housekeeping, release #@UnusedImport
38
def datacheck(**kwargs):
    """
    Validate the given input data and return the API base URL and auth header.

    @keyword token: GitLab X-API private token. Required.
    @keyword base_url: GitLab API v4 base URL. Defaults to the DLR instance.
    @return: List of [api_v4_url, auth] where auth is the PRIVATE-TOKEN header dict.
    @raise ValueError: If no (non-empty) token is given.
    """
    ## Add additional path to environment variable when running inside a conda
    # environment — presumably so native libraries in Library/bin are found at
    # runtime (TODO confirm against the consuming toolchain).
    if os.path.exists(os.path.join(sys.prefix,"conda-meta")) and os.path.join(sys.prefix,"Library","bin") not in os.getenv("PATH",""):
        os.environ["PATH"] = os.pathsep.join([os.path.join(sys.prefix,"Library","bin"),os.getenv("PATH","")])

    # Definition of the header. Fail early with a meaningful message instead of
    # a bare ValueError, and avoid looking the token up twice.
    token = kwargs.get("token", None)
    if not token: raise ValueError("A GitLab X-API token is required")
    auth = {"PRIVATE-TOKEN": token}

    # Definition of the GitLab project ID (an integer number) and the API v4 URL
    api_v4_url = kwargs.get("base_url","https://gitlab.dlr.de/api/v4")

    # Return all default values.
    return [api_v4_url, auth]
56
def download(package=None, projectid=None, version=".", **kwargs):
    """
    Download all resources for a package from the default (PyPI) registry.

    @param package: Name of the package in the project's package registry. Required.
    @param projectid: GitLab project id. May also be given via the "identifier" keyword.
    @param version: Substring filter applied to each file URL. Defaults to "." (all versions).
    @keyword output: Target directory for the downloads. Defaults to the current working directory.
    @keyword datacheck: If truthy, only validate and return [base_url, auth].
    @return: List of all file names in the output directory.
    @raise ValueError: If package or project id is missing, or no token is given.
    """
    # Return all default values and initialize path if required
    if kwargs.get("datacheck",False): return datacheck(**kwargs)
    else: base_url, auth = datacheck(**kwargs)

    # Now the requests module can be loaded w/o errors.
    import requests

    # Use HTML parser
    from bs4 import BeautifulSoup

    # Compatibility with CLI parser
    if not projectid: projectid = kwargs.get("identifier",None)

    # This function cannot be executed w/o a package and a project id
    if not package or not projectid: raise ValueError("Both a package name and a project id are required")

    # Add project ID to base API URL. Coerce the id to a string so integer ids
    # are accepted as well (posixpath.join rejects non-string arguments).
    api_v4_url = posixpath.join(base_url,"projects",str(projectid))
    r = requests.get(posixpath.join(api_v4_url,"packages","pypi","simple",package),headers=auth)

    # Set the output path. Defaults to the current working directory
    path = os.path.abspath(kwargs.get("output",os.getcwd()))
    # Create full output path
    os.makedirs(path, exist_ok=True)

    # Collect all entries for the given versions. Defaults to all.
    data = [x["href"] for x in BeautifulSoup(r.text,'html.parser').find_all('a', {'href': re.compile(api_v4_url)}) if version in x["href"]]

    ## Download all files into the requested output directory.
    # Fix: previously files were opened relative to the current working
    # directory, so a custom "output" directory was created but left empty.
    for url in data:
        with requests.get(url, stream=True, headers=auth) as r:
            r.raise_for_status()
            file_name = Utility.PathLeaf(url.split("#")[0])
            with open(os.path.join(path, file_name), 'wb') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    if chunk: f.write(chunk)

    # Return success by presenting a list of all files
    return os.listdir(path)
101
def pipeline(token=None, projectid=str(12702), **kwargs):
    """
    Main function to execute the script if main cannot be imported.
    Otherwise, run a named pipeline (job) for a given project with the given
    credentials. Defaults to running a remote install script on CARA.

    @param token: GitLab X-API private token. Required (validated by datacheck).
    @param projectid: GitLab project id as a string. Defaults to "12702".
    @keyword job_name: If given, cancel the fresh pipeline and run only this job.
    @keyword branch: Git reference the pipeline runs on. Defaults to "master".
    @keyword api_variables: Mapping of CI variables forwarded to the pipeline.
    @keyword datacheck: If truthy, only validate and return [base_url, auth].
    @return: Parsed JSON of the last API response, or a dict with
             status_code/content when the response body is not JSON.
    """
    # Return all default values and initialize path if required
    settings = copy.deepcopy(kwargs); settings.update({"token":token})
    if kwargs.get("datacheck",False): return datacheck(**settings)
    else: base_url, auth = datacheck(**settings)

    # Now the requests module can be loaded w/o errors.
    import requests

    # Add project ID to base API url
    api_v4_url = posixpath.join(base_url,"projects",projectid)

    # Definition of CI job and the branch of the corresponding CI script
    job = kwargs.get("job_name",None)
    data= {"ref": kwargs.get("branch", "master")}
    variables = kwargs.get("api_variables",{})

    # Additional variables parsed to the CI job. Meaningful default values are only set for auto-installation of software on CARA.
    if job and job in ["stm_cara"]:
        ## The default installation directory is the current user's home directory. This is only a meaningful
        # default value if job refers to a CARA build request.
        cara_login_user = kwargs.get("cara_login_user",getpass.getuser())
        cara_login_credentials = kwargs.get("cara_login_credentials", getpass.getpass())
        install_directory = kwargs.get("cara_install_directory",posixpath.join("/home",cara_login_user,"software"))
        variables = kwargs.get("api_variables",
            {"USER":cara_login_user,"CREDENTIALS":cara_login_credentials, "directory":install_directory,"feature":kwargs.get("package","all")})
    # Encode all CI variables using GitLab's array-style query parameters.
    query = "&".join(["variables[][key]="+str(x)+"&variables[][value]="+str(y) for x, y in variables.items()])

    # Create a new dummy pipeline with the corresponding job. Terminate the pipeline immediately, since only one job is of interest.
    r = requests.post(api_v4_url+"/pipeline?"+query, data=data, headers=auth)

    # Only meaningful if one job is requested in particular.
    if job:
        ## If a specific job is given, create a new pipeline and run only this job.
        # Remove the pipeline afterwards by default. Requires owner credentials.
        r = requests.post(api_v4_url+"/pipelines/%s/cancel" % r.json()["id"], headers=auth)
        r = requests.get(api_v4_url+"/jobs", headers=auth)
        r = requests.post(api_v4_url+"/jobs/%s/play" % [x for x in r.json() if x["name"] in [job]][0]["id"], headers=auth)
        r = requests.get(api_v4_url+"/jobs", headers=auth)

        # Get the job ID of the running job
        JobID = [x for x in r.json() if x["name"] in [job]][0]["id"]
        # Poll until the job has finished, then fetch its trace (log output).
        # (A dead-code early-exit branch guarded by "and False" was removed here;
        # the loop always waits for job completion, as before.)
        while True:
            r = requests.get(api_v4_url+"/jobs/%s" % JobID, headers=auth)
            if r.json()["status"] in ["success", "failure"]:
                PipeID = requests.get(api_v4_url+"/jobs/%s" % r.json()["id"], headers=auth).json()["pipeline"]["id"]
                r = requests.get(api_v4_url+"/jobs/%s/trace" % r.json()["id"], headers=auth)
                break
            time.sleep(2)
        ## Attempt to delete the pipeline. This is only successful when the pipeline
        # succeeded or failed. Requires owner credentials. Best effort: failure to
        # clean up must not mask the job result (narrowed from a bare except, which
        # would also have swallowed KeyboardInterrupt/SystemExit).
        try: requests.delete(api_v4_url+"/pipelines/%s" % PipeID, headers=auth)
        except Exception: pass

    # Obtain detailed information. Fall back to the raw payload when the
    # response body is not valid JSON.
    try: result = r.json()
    except Exception: result = {"status_code":r.status_code,"content":r.text}

    # Return final result code and response
    return result
169
# Use default project function as main when import fails.
# If the PyXMake plug-in import at the top of the file failed, "main" is not
# defined in this module; fall back to running the pipeline directly with no
# extra settings. Otherwise register the local entry points with the plug-in.
if not hasattr(sys.modules[__name__], "main"):
    __settings = {}
    setattr(sys.modules[__name__], "main", pipeline)
else: __settings = {"register":{"datacheck":datacheck,"pipeline":pipeline,"download":download}}

# Script entry point: delegate to whichever "main" was resolved above.
if __name__ == "__main__":
    main(**__settings); sys.exit()
Class to create 2to3 compatible pickling dictionary.
Module containing basic functionalities defined for convenience.
Definition __init__.py:1
pipeline(token=None, projectid=str(12702), **kwargs)
Definition gitlab.py:102
download(package=None, projectid=None, version=".", **kwargs)
Definition gitlab.py:57
datacheck(**kwargs)
Definition gitlab.py:39