repository_name
stringlengths
7
55
func_path_in_repository
stringlengths
4
223
func_name
stringlengths
1
134
whole_func_string
stringlengths
75
104k
language
stringclasses
1 value
func_code_string
stringlengths
75
104k
func_code_tokens
sequencelengths
19
28.4k
func_documentation_string
stringlengths
1
46.9k
func_documentation_tokens
sequencelengths
1
1.97k
split_name
stringclasses
1 value
func_code_url
stringlengths
87
315
qubell/contrib-python-qubell-client
qubell/api/tools/__init__.py
patch_env
def patch_env(env, path, value):
    """Set *value* at a slash-separated *path* inside the nested dict *env*.

    Example:
        patch_env(env, 'application/components/child/configuration/__locator.application-id', '777')
    will change the child app ID to 777.

    :param env: nested dictionary (parsed yaml) to modify in place
    :param path: slash-separated key path, e.g. 'a/b/c'; a path without
                 any '/' patches a top-level key (the original raised
                 KeyError in that case because ''.split('/') == [''])
    :param value: value to store at the path
    :return: True
    """
    *parents, leaf = path.split("/")
    target = env
    for segment in parents:
        target = target[segment]
    target[leaf] = value
    return True
python
def patch_env(env, path, value): """ Set specified value to yaml path. Example: patch('application/components/child/configuration/__locator.application-id','777') Will change child app ID to 777 """ def pathGet(dictionary, path): for item in path.split("/"): dictionary = dictionary[item] return dictionary def pathSet(dictionary, path, value): path = path.split("/") key = path[-1] dictionary = pathGet(dictionary, "/".join(path[:-1])) dictionary[key] = value pathSet(env, path, value) return True
[ "def", "patch_env", "(", "env", ",", "path", ",", "value", ")", ":", "def", "pathGet", "(", "dictionary", ",", "path", ")", ":", "for", "item", "in", "path", ".", "split", "(", "\"/\"", ")", ":", "dictionary", "=", "dictionary", "[", "item", "]", "return", "dictionary", "def", "pathSet", "(", "dictionary", ",", "path", ",", "value", ")", ":", "path", "=", "path", ".", "split", "(", "\"/\"", ")", "key", "=", "path", "[", "-", "1", "]", "dictionary", "=", "pathGet", "(", "dictionary", ",", "\"/\"", ".", "join", "(", "path", "[", ":", "-", "1", "]", ")", ")", "dictionary", "[", "key", "]", "=", "value", "pathSet", "(", "env", ",", "path", ",", "value", ")", "return", "True" ]
Set specified value to yaml path. Example: patch('application/components/child/configuration/__locator.application-id','777') Will change child app ID to 777
[ "Set", "specified", "value", "to", "yaml", "path", ".", "Example", ":", "patch", "(", "application", "/", "components", "/", "child", "/", "configuration", "/", "__locator", ".", "application", "-", "id", "777", ")", "Will", "change", "child", "app", "ID", "to", "777" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/tools/__init__.py#L248-L266
qubell/contrib-python-qubell-client
qubell/api/tools/__init__.py
get_starter_kit_meta
def get_starter_kit_meta(name):
    """Extract the metadata link for a starter kit from the platform configs.

    Starter kits are available in the add component -> starter kit menu.
    Beware: the config could be changed by deploy scripts during deploy.

    :param name: Name of starter kit
    :return: Link to metadata
    """
    config = yaml.safe_load(requests.get(url=starter_kits_url).content)
    kits = config['kits']
    matches = [kit['metaUrl'] for kit in kits if kit['name'] == name]
    assert len(matches) == 1, "No component %s found in meta:\n %s" % (name, kits)
    meta_doc = yaml.safe_load(requests.get(url=matches[0]).content)
    return meta_doc['download_url']
python
def get_starter_kit_meta(name): """ Extract metadata link for starter kit from platform configs. Starter kit available on add component - starter kit menu. Beware, config could be changed by deploy scripts during deploy. :param name: Name of starter kit :return: Link to metadata """ kits = yaml.safe_load(requests.get(url=starter_kits_url).content)['kits'] kits_meta_url = [x['metaUrl'] for x in kits if x['name'] == name] assert len(kits_meta_url)==1, "No component %s found in meta:\n %s" % (name, kits) meta = yaml.safe_load(requests.get(url=kits_meta_url[0]).content)['download_url'] return meta
[ "def", "get_starter_kit_meta", "(", "name", ")", ":", "kits", "=", "yaml", ".", "safe_load", "(", "requests", ".", "get", "(", "url", "=", "starter_kits_url", ")", ".", "content", ")", "[", "'kits'", "]", "kits_meta_url", "=", "[", "x", "[", "'metaUrl'", "]", "for", "x", "in", "kits", "if", "x", "[", "'name'", "]", "==", "name", "]", "assert", "len", "(", "kits_meta_url", ")", "==", "1", ",", "\"No component %s found in meta:\\n %s\"", "%", "(", "name", ",", "kits", ")", "meta", "=", "yaml", ".", "safe_load", "(", "requests", ".", "get", "(", "url", "=", "kits_meta_url", "[", "0", "]", ")", ".", "content", ")", "[", "'download_url'", "]", "return", "meta" ]
Extract metadata link for starter kit from platform configs. Starter kit available on add component - starter kit menu. Beware, config could be changed by deploy scripts during deploy. :param name: Name of starter kit :return: Link to metadata
[ "Extract", "metadata", "link", "for", "starter", "kit", "from", "platform", "configs", ".", "Starter", "kit", "available", "on", "add", "component", "-", "starter", "kit", "menu", ".", "Beware", "config", "could", "be", "changed", "by", "deploy", "scripts", "during", "deploy", ".", ":", "param", "name", ":", "Name", "of", "starter", "kit", ":", "return", ":", "Link", "to", "metadata" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/tools/__init__.py#L298-L310
qubell/contrib-python-qubell-client
qubell/api/tools/__init__.py
get_manifest_from_meta
def get_manifest_from_meta(metaurl, name):
    """Extract the manifest url for application *name* from a metadata document.

    :param metaurl: Url (anything containing 'http') or local file path
                    to the metadata yaml
    :param name: Name of application to extract
    :return: Manifest url of the named application
    """
    if 'http' in metaurl:
        kit = yaml.safe_load(requests.get(url=metaurl).content)['kit']['applications']
    else:
        # Context manager closes the file; the original leaked the
        # descriptor returned by open().
        with open(metaurl) as meta_file:
            kit = yaml.safe_load(meta_file.read())['kit']['applications']
    app_urls = [x['manifest'] for x in kit if x['name'] == name]
    assert len(app_urls) == 1, \
        "Expected exactly one application named %s, got %s" % (name, app_urls)
    return app_urls[0]
python
def get_manifest_from_meta(metaurl, name): """ Extact manifest url from metadata url :param metaurl: Url to metadata :param name: Name of application to extract :return: """ if 'http' in metaurl: kit = yaml.safe_load(requests.get(url=metaurl).content)['kit']['applications'] else: kit = yaml.safe_load(open(metaurl).read())['kit']['applications'] app_urls = [x['manifest'] for x in kit if x['name'] == name] assert len(app_urls) == 1 return app_urls[0]
[ "def", "get_manifest_from_meta", "(", "metaurl", ",", "name", ")", ":", "if", "'http'", "in", "metaurl", ":", "kit", "=", "yaml", ".", "safe_load", "(", "requests", ".", "get", "(", "url", "=", "metaurl", ")", ".", "content", ")", "[", "'kit'", "]", "[", "'applications'", "]", "else", ":", "kit", "=", "yaml", ".", "safe_load", "(", "open", "(", "metaurl", ")", ".", "read", "(", ")", ")", "[", "'kit'", "]", "[", "'applications'", "]", "app_urls", "=", "[", "x", "[", "'manifest'", "]", "for", "x", "in", "kit", "if", "x", "[", "'name'", "]", "==", "name", "]", "assert", "len", "(", "app_urls", ")", "==", "1", "return", "app_urls", "[", "0", "]" ]
Extact manifest url from metadata url :param metaurl: Url to metadata :param name: Name of application to extract :return:
[ "Extact", "manifest", "url", "from", "metadata", "url", ":", "param", "metaurl", ":", "Url", "to", "metadata", ":", "param", "name", ":", "Name", "of", "application", "to", "extract", ":", "return", ":" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/tools/__init__.py#L312-L325
davidblaisonneau-orange/foreman
foreman/subItem.py
SubItem.getPayloadStruct
def getPayloadStruct(self, attributes, objType=None):
    """ Function getPayloadStruct
    Build the payload structure used for a creation or a modification

    @param attributes: The data to wrap in the payload
    @param objType: NOT USED in this class
    @return RETURN: The payload dict, additionally nested under the
                    parent payload object when self.setInParentPayload
                    is set
    """
    payload = {self.payloadObj: attributes}
    if self.setInParentPayload:
        payload = {self.parentPayloadObject: payload}
    return payload
python
def getPayloadStruct(self, attributes, objType=None): """ Function getPayloadStruct Get the payload structure to do a creation or a modification @param key: The key to modify @param attribute: The data @param objType: NOT USED in this class @return RETURN: The API result """ if self.setInParentPayload: return {self.parentPayloadObject: {self.payloadObj: attributes}} else: return {self.payloadObj: attributes}
[ "def", "getPayloadStruct", "(", "self", ",", "attributes", ",", "objType", "=", "None", ")", ":", "if", "self", ".", "setInParentPayload", ":", "return", "{", "self", ".", "parentPayloadObject", ":", "{", "self", ".", "payloadObj", ":", "attributes", "}", "}", "else", ":", "return", "{", "self", ".", "payloadObj", ":", "attributes", "}" ]
Function getPayloadStruct Get the payload structure to do a creation or a modification @param key: The key to modify @param attribute: The data @param objType: NOT USED in this class @return RETURN: The API result
[ "Function", "getPayloadStruct", "Get", "the", "payload", "structure", "to", "do", "a", "creation", "or", "a", "modification" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/subItem.py#L50-L63
davidblaisonneau-orange/foreman
foreman/api.py
Api.log
def log(function):
    """ Function log
    Decorator that records the last request's variables into self.history
    after the wrapped call returns, then resets them via clearReqVars

    @return RETURN: the wrapped function
    """
    def _log(self, *args, **kwargs):
        result = function(self, *args, **kwargs)
        # Trim before appending so history stays bounded by maxHistory.
        if len(self.history) > self.maxHistory:
            self.history = self.history[1:self.maxHistory]
        snapshot = {
            'errorMsg': self.errorMsg,
            'payload': self.payload,
            'url': self.url,
            'resp': self.resp,
            'res': self.res,
            'printErrors': self.printErrors,
            'method': self.method,
        }
        self.history.append(snapshot)
        self.clearReqVars()
        return result
    return _log
python
def log(function): """ Function log Decorator to log lasts request before sending a new one @return RETURN: None """ def _log(self, *args, **kwargs): ret = function(self, *args, **kwargs) if len(self.history) > self.maxHistory: self.history = self.history[1:self.maxHistory] self.history.append({'errorMsg': self.errorMsg, 'payload': self.payload, 'url': self.url, 'resp': self.resp, 'res': self.res, 'printErrors': self.printErrors, 'method': self.method}) self.clearReqVars() return ret return _log
[ "def", "log", "(", "function", ")", ":", "def", "_log", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "function", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "if", "len", "(", "self", ".", "history", ")", ">", "self", ".", "maxHistory", ":", "self", ".", "history", "=", "self", ".", "history", "[", "1", ":", "self", ".", "maxHistory", "]", "self", ".", "history", ".", "append", "(", "{", "'errorMsg'", ":", "self", ".", "errorMsg", ",", "'payload'", ":", "self", ".", "payload", ",", "'url'", ":", "self", ".", "url", ",", "'resp'", ":", "self", ".", "resp", ",", "'res'", ":", "self", ".", "res", ",", "'printErrors'", ":", "self", ".", "printErrors", ",", "'method'", ":", "self", ".", "method", "}", ")", "self", ".", "clearReqVars", "(", ")", "return", "ret", "return", "_log" ]
Function log Decorator to log lasts request before sending a new one @return RETURN: None
[ "Function", "log", "Decorator", "to", "log", "lasts", "request", "before", "sending", "a", "new", "one" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/api.py#L50-L69
davidblaisonneau-orange/foreman
foreman/api.py
Api.clearReqVars
def clearReqVars(self):
    """ Function clearReqVars
    Reset the per-request variables used to build the request history

    @return RETURN: None
    """
    # One pass over the attribute names instead of seven assignments.
    for attr in ('errorMsg', 'payload', 'url', 'resp',
                 'res', 'method', 'printErrors'):
        setattr(self, attr, None)
python
def clearReqVars(self): """ Function clearHistVars Clear the variables used to get history of all vars @return RETURN: None """ self.errorMsg = None self.payload = None self.url = None self.resp = None self.res = None self.method = None self.printErrors = None
[ "def", "clearReqVars", "(", "self", ")", ":", "self", ".", "errorMsg", "=", "None", "self", ".", "payload", "=", "None", "self", ".", "url", "=", "None", "self", ".", "resp", "=", "None", "self", ".", "res", "=", "None", "self", ".", "method", "=", "None", "self", ".", "printErrors", "=", "None" ]
Function clearHistVars Clear the variables used to get history of all vars @return RETURN: None
[ "Function", "clearHistVars", "Clear", "the", "variables", "used", "to", "get", "history", "of", "all", "vars" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/api.py#L71-L83
davidblaisonneau-orange/foreman
foreman/api.py
Api.list
def list(self, obj, filter=False, only_id=False, limit=20):
    """ Function list
    Get the list of an object

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param filter: filter for objects (search expression), or False
    @param only_id: boolean to only return dict with name/id
    @param limit: max number of objects returned per page
    @return RETURN: the list of the object, or False on failure
    """
    # Request state is stored on self so the @log decorator can record it.
    self.url = '{}{}/?per_page={}'.format(self.base_url, obj, limit)
    self.method = 'GET'
    if filter:
        self.url += '&search={}'.format(filter)
    self.resp = requests.get(url=self.url, auth=self.auth,
                             headers=self.headers, cert=self.ca_cert)
    if only_id:
        # __process_resp__ parses self.resp into self.res as a side effect.
        if self.__process_resp__(obj) is False:
            return False
        if type(self.res['results']) is list:
            # Flat result list: map name -> id directly.
            return dict((x['name'], x['id']) for x in self.res['results'])
        elif type(self.res['results']) is dict:
            # Grouped results (dict of lists); flatten every group into
            # one name -> id mapping.
            r = {}
            for v in self.res['results'].values():
                for vv in v:
                    r[vv['name']] = vv['id']
            return r
        else:
            return False
    else:
        return self.__process_resp__(obj)
python
def list(self, obj, filter=False, only_id=False, limit=20): """ Function list Get the list of an object @param obj: object name ('hosts', 'puppetclasses'...) @param filter: filter for objects @param only_id: boolean to only return dict with name/id @return RETURN: the list of the object """ self.url = '{}{}/?per_page={}'.format(self.base_url, obj, limit) self.method = 'GET' if filter: self.url += '&search={}'.format(filter) self.resp = requests.get(url=self.url, auth=self.auth, headers=self.headers, cert=self.ca_cert) if only_id: if self.__process_resp__(obj) is False: return False if type(self.res['results']) is list: return dict((x['name'], x['id']) for x in self.res['results']) elif type(self.res['results']) is dict: r = {} for v in self.res['results'].values(): for vv in v: r[vv['name']] = vv['id'] return r else: return False else: return self.__process_resp__(obj)
[ "def", "list", "(", "self", ",", "obj", ",", "filter", "=", "False", ",", "only_id", "=", "False", ",", "limit", "=", "20", ")", ":", "self", ".", "url", "=", "'{}{}/?per_page={}'", ".", "format", "(", "self", ".", "base_url", ",", "obj", ",", "limit", ")", "self", ".", "method", "=", "'GET'", "if", "filter", ":", "self", ".", "url", "+=", "'&search={}'", ".", "format", "(", "filter", ")", "self", ".", "resp", "=", "requests", ".", "get", "(", "url", "=", "self", ".", "url", ",", "auth", "=", "self", ".", "auth", ",", "headers", "=", "self", ".", "headers", ",", "cert", "=", "self", ".", "ca_cert", ")", "if", "only_id", ":", "if", "self", ".", "__process_resp__", "(", "obj", ")", "is", "False", ":", "return", "False", "if", "type", "(", "self", ".", "res", "[", "'results'", "]", ")", "is", "list", ":", "return", "dict", "(", "(", "x", "[", "'name'", "]", ",", "x", "[", "'id'", "]", ")", "for", "x", "in", "self", ".", "res", "[", "'results'", "]", ")", "elif", "type", "(", "self", ".", "res", "[", "'results'", "]", ")", "is", "dict", ":", "r", "=", "{", "}", "for", "v", "in", "self", ".", "res", "[", "'results'", "]", ".", "values", "(", ")", ":", "for", "vv", "in", "v", ":", "r", "[", "vv", "[", "'name'", "]", "]", "=", "vv", "[", "'id'", "]", "return", "r", "else", ":", "return", "False", "else", ":", "return", "self", ".", "__process_resp__", "(", "obj", ")" ]
Function list Get the list of an object @param obj: object name ('hosts', 'puppetclasses'...) @param filter: filter for objects @param only_id: boolean to only return dict with name/id @return RETURN: the list of the object
[ "Function", "list", "Get", "the", "list", "of", "an", "object" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/api.py#L86-L115
davidblaisonneau-orange/foreman
foreman/api.py
Api.get
def get(self, obj, id, sub_object=None):
    """ Function get
    Get an object by id

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param id: the id of the object (name or id)
    @param sub_object: optional sub-resource appended to the url
    @return RETURN: the targeted object, or False on failure
    """
    self.method = 'GET'
    url = '{}{}/{}'.format(self.base_url, obj, id)
    if sub_object:
        url = url + '/' + sub_object
    self.url = url
    self.resp = requests.get(url=self.url, auth=self.auth,
                             headers=self.headers, cert=self.ca_cert)
    return self.res if self.__process_resp__(obj) else False
python
def get(self, obj, id, sub_object=None): """ Function get Get an object by id @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @return RETURN: the targeted object """ self.url = '{}{}/{}'.format(self.base_url, obj, id) self.method = 'GET' if sub_object: self.url += '/' + sub_object self.resp = requests.get(url=self.url, auth=self.auth, headers=self.headers, cert=self.ca_cert) if self.__process_resp__(obj): return self.res return False
[ "def", "get", "(", "self", ",", "obj", ",", "id", ",", "sub_object", "=", "None", ")", ":", "self", ".", "url", "=", "'{}{}/{}'", ".", "format", "(", "self", ".", "base_url", ",", "obj", ",", "id", ")", "self", ".", "method", "=", "'GET'", "if", "sub_object", ":", "self", ".", "url", "+=", "'/'", "+", "sub_object", "self", ".", "resp", "=", "requests", ".", "get", "(", "url", "=", "self", ".", "url", ",", "auth", "=", "self", ".", "auth", ",", "headers", "=", "self", ".", "headers", ",", "cert", "=", "self", ".", "ca_cert", ")", "if", "self", ".", "__process_resp__", "(", "obj", ")", ":", "return", "self", ".", "res", "return", "False" ]
Function get Get an object by id @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @return RETURN: the targeted object
[ "Function", "get", "Get", "an", "object", "by", "id" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/api.py#L118-L134
davidblaisonneau-orange/foreman
foreman/api.py
Api.get_id_by_name
def get_id_by_name(self, obj, name):
    """ Function get_id_by_name
    Get the id of an object from its name

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param name: the name of the object
    @return RETURN: the object id, or False if not found or the
                    lookup failed
    """
    # Renamed the local so it no longer shadows the builtin `list`;
    # also guard against self.list() returning False (lookup failure),
    # which previously crashed with AttributeError on .keys().
    ids = self.list(obj, filter='name = "{}"'.format(name),
                    only_id=True, limit=1)
    if not ids:
        return False
    return ids.get(name, False)
python
def get_id_by_name(self, obj, name): """ Function get_id_by_name Get the id of an object @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @return RETURN: the targeted object """ list = self.list(obj, filter='name = "{}"'.format(name), only_id=True, limit=1) return list[name] if name in list.keys() else False
[ "def", "get_id_by_name", "(", "self", ",", "obj", ",", "name", ")", ":", "list", "=", "self", ".", "list", "(", "obj", ",", "filter", "=", "'name = \"{}\"'", ".", "format", "(", "name", ")", ",", "only_id", "=", "True", ",", "limit", "=", "1", ")", "return", "list", "[", "name", "]", "if", "name", "in", "list", ".", "keys", "(", ")", "else", "False" ]
Function get_id_by_name Get the id of an object @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @return RETURN: the targeted object
[ "Function", "get_id_by_name", "Get", "the", "id", "of", "an", "object" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/api.py#L137-L147
davidblaisonneau-orange/foreman
foreman/api.py
Api.set
def set(self, obj, id, payload, action='', async_req=False):
    """ Function set
    Set an object by id

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param id: the id of the object (name or id)
    @param payload: the dict of the payload
    @param action: specific action of an object ('power'...)
    @param async_req: should this request be async, if true use
                      return.result() to get the response
    @return RETURN: the server response
    """
    # NOTE: parameter renamed from `async`, which is a reserved keyword
    # since Python 3.7 and made this definition a SyntaxError there.
    self.url = '{}{}/{}'.format(self.base_url, obj, id)
    self.method = 'PUT'
    if action:
        self.url += '/{}'.format(action)
    self.payload = json.dumps(payload)
    if async_req:
        session = FuturesSession()
        return session.put(url=self.url, auth=self.auth,
                           headers=self.headers, data=self.payload,
                           cert=self.ca_cert)
    else:
        self.resp = requests.put(url=self.url, auth=self.auth,
                                 headers=self.headers, data=self.payload,
                                 cert=self.ca_cert)
        if self.__process_resp__(obj):
            return self.res
        return False
python
def set(self, obj, id, payload, action='', async=False): """ Function set Set an object by id @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @param action: specific action of an object ('power'...) @param payload: the dict of the payload @param async: should this request be async, if true use return.result() to get the response @return RETURN: the server response """ self.url = '{}{}/{}'.format(self.base_url, obj, id) self.method = 'PUT' if action: self.url += '/{}'.format(action) self.payload = json.dumps(payload) if async: session = FuturesSession() return session.put(url=self.url, auth=self.auth, headers=self.headers, data=self.payload, cert=self.ca_cert) else: self.resp = requests.put(url=self.url, auth=self.auth, headers=self.headers, data=self.payload, cert=self.ca_cert) if self.__process_resp__(obj): return self.res return False
[ "def", "set", "(", "self", ",", "obj", ",", "id", ",", "payload", ",", "action", "=", "''", ",", "async", "=", "False", ")", ":", "self", ".", "url", "=", "'{}{}/{}'", ".", "format", "(", "self", ".", "base_url", ",", "obj", ",", "id", ")", "self", ".", "method", "=", "'PUT'", "if", "action", ":", "self", ".", "url", "+=", "'/{}'", ".", "format", "(", "action", ")", "self", ".", "payload", "=", "json", ".", "dumps", "(", "payload", ")", "if", "async", ":", "session", "=", "FuturesSession", "(", ")", "return", "session", ".", "put", "(", "url", "=", "self", ".", "url", ",", "auth", "=", "self", ".", "auth", ",", "headers", "=", "self", ".", "headers", ",", "data", "=", "self", ".", "payload", ",", "cert", "=", "self", ".", "ca_cert", ")", "else", ":", "self", ".", "resp", "=", "requests", ".", "put", "(", "url", "=", "self", ".", "url", ",", "auth", "=", "self", ".", "auth", ",", "headers", "=", "self", ".", "headers", ",", "data", "=", "self", ".", "payload", ",", "cert", "=", "self", ".", "ca_cert", ")", "if", "self", ".", "__process_resp__", "(", "obj", ")", ":", "return", "self", ".", "res", "return", "False" ]
Function set Set an object by id @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @param action: specific action of an object ('power'...) @param payload: the dict of the payload @param async: should this request be async, if true use return.result() to get the response @return RETURN: the server response
[ "Function", "set", "Set", "an", "object", "by", "id" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/api.py#L150-L178
davidblaisonneau-orange/foreman
foreman/api.py
Api.create
def create(self, obj, payload, async_req=False):
    """ Function create
    Create an new object

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param payload: the dict of the payload
    @param async_req: should this request be async, if true use
                      return.result() to get the response
    @return RETURN: the server response
    """
    # NOTE: parameter renamed from `async`, which is a reserved keyword
    # since Python 3.7 and made this definition a SyntaxError there.
    self.url = self.base_url + obj
    self.method = 'POST'
    self.payload = json.dumps(payload)
    if async_req:
        self.method = 'POST(Async)'
        session = FuturesSession()
        self.resp = session.post(url=self.url, auth=self.auth,
                                 headers=self.headers, data=self.payload,
                                 cert=self.ca_cert)
        return self.resp
    else:
        self.resp = requests.post(url=self.url, auth=self.auth,
                                  headers=self.headers, data=self.payload,
                                  cert=self.ca_cert)
        return self.__process_resp__(obj)
python
def create(self, obj, payload, async=False): """ Function create Create an new object @param obj: object name ('hosts', 'puppetclasses'...) @param payload: the dict of the payload @param async: should this request be async, if true use return.result() to get the response @return RETURN: the server response """ self.url = self.base_url + obj self.method = 'POST' self.payload = json.dumps(payload) if async: self.method = 'POST(Async)' session = FuturesSession() self.resp = session.post(url=self.url, auth=self.auth, headers=self.headers, data=self.payload, cert=self.ca_cert) return self.resp else: self.resp = requests.post(url=self.url, auth=self.auth, headers=self.headers, data=self.payload, cert=self.ca_cert) return self.__process_resp__(obj)
[ "def", "create", "(", "self", ",", "obj", ",", "payload", ",", "async", "=", "False", ")", ":", "self", ".", "url", "=", "self", ".", "base_url", "+", "obj", "self", ".", "method", "=", "'POST'", "self", ".", "payload", "=", "json", ".", "dumps", "(", "payload", ")", "if", "async", ":", "self", ".", "method", "=", "'POST(Async)'", "session", "=", "FuturesSession", "(", ")", "self", ".", "resp", "=", "session", ".", "post", "(", "url", "=", "self", ".", "url", ",", "auth", "=", "self", ".", "auth", ",", "headers", "=", "self", ".", "headers", ",", "data", "=", "self", ".", "payload", ",", "cert", "=", "self", ".", "ca_cert", ")", "return", "self", ".", "resp", "else", ":", "self", ".", "resp", "=", "requests", ".", "post", "(", "url", "=", "self", ".", "url", ",", "auth", "=", "self", ".", "auth", ",", "headers", "=", "self", ".", "headers", ",", "data", "=", "self", ".", "payload", ",", "cert", "=", "self", ".", "ca_cert", ")", "return", "self", ".", "__process_resp__", "(", "obj", ")" ]
Function create Create an new object @param obj: object name ('hosts', 'puppetclasses'...) @param payload: the dict of the payload @param async: should this request be async, if true use return.result() to get the response @return RETURN: the server response
[ "Function", "create", "Create", "an", "new", "object" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/api.py#L181-L205
davidblaisonneau-orange/foreman
foreman/api.py
Api.delete
def delete(self, obj, id):
    """ Function delete
    Delete an object by id

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param id: the id of the object (name or id)
    @return RETURN: the server response
    """
    self.method = 'DELETE'
    self.url = '{}{}/{}'.format(self.base_url, obj, id)
    response = requests.delete(url=self.url, auth=self.auth,
                               headers=self.headers, cert=self.ca_cert)
    self.resp = response
    return self.__process_resp__(obj)
python
def delete(self, obj, id): """ Function delete Delete an object by id @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @return RETURN: the server response """ self.url = '{}{}/{}'.format(self.base_url, obj, id) self.method = 'DELETE' self.resp = requests.delete(url=self.url, auth=self.auth, headers=self.headers, cert=self.ca_cert) return self.__process_resp__(obj)
[ "def", "delete", "(", "self", ",", "obj", ",", "id", ")", ":", "self", ".", "url", "=", "'{}{}/{}'", ".", "format", "(", "self", ".", "base_url", ",", "obj", ",", "id", ")", "self", ".", "method", "=", "'DELETE'", "self", ".", "resp", "=", "requests", ".", "delete", "(", "url", "=", "self", ".", "url", ",", "auth", "=", "self", ".", "auth", ",", "headers", "=", "self", ".", "headers", ",", "cert", "=", "self", ".", "ca_cert", ")", "return", "self", ".", "__process_resp__", "(", "obj", ")" ]
Function delete Delete an object by id @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @return RETURN: the server response
[ "Function", "delete", "Delete", "an", "object", "by", "id" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/api.py#L208-L221
MatterMiners/cobald
cobald/daemon/runners/thread_runner.py
CapturingThread.run
def run(self):
    """Modified ``run`` that captures return value and exceptions from ``target``"""
    try:
        if self._target:
            return_value = self._target(*self._args, **self._kwargs)
            # A thread target should not return anything; a non-None
            # return would otherwise be silently lost, so it is surfaced
            # as an exception for whoever joins the thread.
            if return_value is not None:
                self._exception = OrphanedReturn(self, return_value)
    except BaseException as err:
        # Capture BaseException (not just Exception) so that e.g.
        # SystemExit raised inside the thread is reported to the joiner.
        self._exception = err
    finally:
        # Avoid a refcycle if the thread is running a function with
        # an argument that has a member that points to the thread.
        del self._target, self._args, self._kwargs
python
def run(self): """Modified ``run`` that captures return value and exceptions from ``target``""" try: if self._target: return_value = self._target(*self._args, **self._kwargs) if return_value is not None: self._exception = OrphanedReturn(self, return_value) except BaseException as err: self._exception = err finally: # Avoid a refcycle if the thread is running a function with # an argument that has a member that points to the thread. del self._target, self._args, self._kwargs
[ "def", "run", "(", "self", ")", ":", "try", ":", "if", "self", ".", "_target", ":", "return_value", "=", "self", ".", "_target", "(", "*", "self", ".", "_args", ",", "*", "*", "self", ".", "_kwargs", ")", "if", "return_value", "is", "not", "None", ":", "self", ".", "_exception", "=", "OrphanedReturn", "(", "self", ",", "return_value", ")", "except", "BaseException", "as", "err", ":", "self", ".", "_exception", "=", "err", "finally", ":", "# Avoid a refcycle if the thread is running a function with", "# an argument that has a member that points to the thread.", "del", "self", ".", "_target", ",", "self", ".", "_args", ",", "self", ".", "_kwargs" ]
Modified ``run`` that captures return value and exceptions from ``target``
[ "Modified", "run", "that", "captures", "return", "value", "and", "exceptions", "from", "target" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/thread_runner.py#L24-L36
MatterMiners/cobald
cobald/daemon/runners/thread_runner.py
ThreadRunner._start_payloads
def _start_payloads(self):
    """Start all queued payloads"""
    # Snapshot and drain the queue under the lock; threads are booted
    # outside the critical section.
    with self._lock:
        pending = self._payloads.copy()
        self._payloads.clear()
    for routine in pending:
        worker = CapturingThread(target=routine)
        worker.start()
        self._threads.add(worker)
        self._logger.debug('booted thread %s', worker)
    # Yield control so freshly started threads get a chance to run.
    time.sleep(0)
python
def _start_payloads(self): """Start all queued payloads""" with self._lock: payloads = self._payloads.copy() self._payloads.clear() for subroutine in payloads: thread = CapturingThread(target=subroutine) thread.start() self._threads.add(thread) self._logger.debug('booted thread %s', thread) time.sleep(0)
[ "def", "_start_payloads", "(", "self", ")", ":", "with", "self", ".", "_lock", ":", "payloads", "=", "self", ".", "_payloads", ".", "copy", "(", ")", "self", ".", "_payloads", ".", "clear", "(", ")", "for", "subroutine", "in", "payloads", ":", "thread", "=", "CapturingThread", "(", "target", "=", "subroutine", ")", "thread", ".", "start", "(", ")", "self", ".", "_threads", ".", "add", "(", "thread", ")", "self", ".", "_logger", ".", "debug", "(", "'booted thread %s'", ",", "thread", ")", "time", ".", "sleep", "(", "0", ")" ]
Start all queued payloads
[ "Start", "all", "queued", "payloads" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/thread_runner.py#L61-L71
MatterMiners/cobald
cobald/daemon/runners/thread_runner.py
ThreadRunner._reap_payloads
def _reap_payloads(self): """Clean up all finished payloads""" for thread in self._threads.copy(): # CapturingThread.join will throw if thread.join(timeout=0): self._threads.remove(thread) self._logger.debug('reaped thread %s', thread)
python
def _reap_payloads(self): """Clean up all finished payloads""" for thread in self._threads.copy(): # CapturingThread.join will throw if thread.join(timeout=0): self._threads.remove(thread) self._logger.debug('reaped thread %s', thread)
[ "def", "_reap_payloads", "(", "self", ")", ":", "for", "thread", "in", "self", ".", "_threads", ".", "copy", "(", ")", ":", "# CapturingThread.join will throw", "if", "thread", ".", "join", "(", "timeout", "=", "0", ")", ":", "self", ".", "_threads", ".", "remove", "(", "thread", ")", "self", ".", "_logger", ".", "debug", "(", "'reaped thread %s'", ",", "thread", ")" ]
Clean up all finished payloads
[ "Clean", "up", "all", "finished", "payloads" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/thread_runner.py#L73-L79
teaearlgraycold/puni
puni/decorators.py
update_cache
def update_cache(func): """Decorate functions that modify the internally stored usernotes JSON. Ensures that updates are mirrored onto reddit. Arguments: func: the function being decorated """ @wraps(func) def wrapper(self, *args, **kwargs): """The wrapper function.""" lazy = kwargs.get('lazy', False) kwargs.pop('lazy', None) if not lazy: self.get_json() ret = func(self, *args, **kwargs) # If returning a string assume it is an update message if isinstance(ret, str) and not lazy: self.set_json(ret) else: return ret return wrapper
python
def update_cache(func): """Decorate functions that modify the internally stored usernotes JSON. Ensures that updates are mirrored onto reddit. Arguments: func: the function being decorated """ @wraps(func) def wrapper(self, *args, **kwargs): """The wrapper function.""" lazy = kwargs.get('lazy', False) kwargs.pop('lazy', None) if not lazy: self.get_json() ret = func(self, *args, **kwargs) # If returning a string assume it is an update message if isinstance(ret, str) and not lazy: self.set_json(ret) else: return ret return wrapper
[ "def", "update_cache", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"The wrapper function.\"\"\"", "lazy", "=", "kwargs", ".", "get", "(", "'lazy'", ",", "False", ")", "kwargs", ".", "pop", "(", "'lazy'", ",", "None", ")", "if", "not", "lazy", ":", "self", ".", "get_json", "(", ")", "ret", "=", "func", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "# If returning a string assume it is an update message", "if", "isinstance", "(", "ret", ",", "str", ")", "and", "not", "lazy", ":", "self", ".", "set_json", "(", "ret", ")", "else", ":", "return", "ret", "return", "wrapper" ]
Decorate functions that modify the internally stored usernotes JSON. Ensures that updates are mirrored onto reddit. Arguments: func: the function being decorated
[ "Decorate", "functions", "that", "modify", "the", "internally", "stored", "usernotes", "JSON", "." ]
train
https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/decorators.py#L21-L46
MatterMiners/cobald
cobald/interfaces/_controller.py
Controller.s
def s(cls: Type[C], *args, **kwargs) -> Partial[C]: """ Create an unbound prototype of this class, partially applying arguments .. code:: python controller = Controller.s(interval=20) pipeline = controller(rate=10) >> pool """ return Partial(cls, *args, **kwargs)
python
def s(cls: Type[C], *args, **kwargs) -> Partial[C]: """ Create an unbound prototype of this class, partially applying arguments .. code:: python controller = Controller.s(interval=20) pipeline = controller(rate=10) >> pool """ return Partial(cls, *args, **kwargs)
[ "def", "s", "(", "cls", ":", "Type", "[", "C", "]", ",", "*", "args", ",", "*", "*", "kwargs", ")", "->", "Partial", "[", "C", "]", ":", "return", "Partial", "(", "cls", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Create an unbound prototype of this class, partially applying arguments .. code:: python controller = Controller.s(interval=20) pipeline = controller(rate=10) >> pool
[ "Create", "an", "unbound", "prototype", "of", "this", "class", "partially", "applying", "arguments" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/interfaces/_controller.py#L21-L31
josiah-wolf-oberholtzer/uqbar
uqbar/apis/InheritanceGraph.py
InheritanceGraph._build_mappings
def _build_mappings( self, classes: Sequence[type] ) -> Tuple[Mapping[type, Sequence[type]], Mapping[type, Sequence[type]]]: """ Collect all bases and organize into parent/child mappings. """ parents_to_children: MutableMapping[type, Set[type]] = {} children_to_parents: MutableMapping[type, Set[type]] = {} visited_classes: Set[type] = set() class_stack = list(classes) while class_stack: class_ = class_stack.pop() if class_ in visited_classes: continue visited_classes.add(class_) for base in class_.__bases__: if base not in visited_classes: class_stack.append(base) parents_to_children.setdefault(base, set()).add(class_) children_to_parents.setdefault(class_, set()).add(base) sorted_parents_to_children: MutableMapping[ type, List[type] ] = collections.OrderedDict() for parent, children in sorted( parents_to_children.items(), key=lambda x: (x[0].__module__, x[0].__name__) ): sorted_parents_to_children[parent] = sorted( children, key=lambda x: (x.__module__, x.__name__) ) sorted_children_to_parents: MutableMapping[ type, List[type] ] = collections.OrderedDict() for child, parents in sorted( children_to_parents.items(), key=lambda x: (x[0].__module__, x[0].__name__) ): sorted_children_to_parents[child] = sorted( parents, key=lambda x: (x.__module__, x.__name__) ) return sorted_parents_to_children, sorted_children_to_parents
python
def _build_mappings( self, classes: Sequence[type] ) -> Tuple[Mapping[type, Sequence[type]], Mapping[type, Sequence[type]]]: """ Collect all bases and organize into parent/child mappings. """ parents_to_children: MutableMapping[type, Set[type]] = {} children_to_parents: MutableMapping[type, Set[type]] = {} visited_classes: Set[type] = set() class_stack = list(classes) while class_stack: class_ = class_stack.pop() if class_ in visited_classes: continue visited_classes.add(class_) for base in class_.__bases__: if base not in visited_classes: class_stack.append(base) parents_to_children.setdefault(base, set()).add(class_) children_to_parents.setdefault(class_, set()).add(base) sorted_parents_to_children: MutableMapping[ type, List[type] ] = collections.OrderedDict() for parent, children in sorted( parents_to_children.items(), key=lambda x: (x[0].__module__, x[0].__name__) ): sorted_parents_to_children[parent] = sorted( children, key=lambda x: (x.__module__, x.__name__) ) sorted_children_to_parents: MutableMapping[ type, List[type] ] = collections.OrderedDict() for child, parents in sorted( children_to_parents.items(), key=lambda x: (x[0].__module__, x[0].__name__) ): sorted_children_to_parents[child] = sorted( parents, key=lambda x: (x.__module__, x.__name__) ) return sorted_parents_to_children, sorted_children_to_parents
[ "def", "_build_mappings", "(", "self", ",", "classes", ":", "Sequence", "[", "type", "]", ")", "->", "Tuple", "[", "Mapping", "[", "type", ",", "Sequence", "[", "type", "]", "]", ",", "Mapping", "[", "type", ",", "Sequence", "[", "type", "]", "]", "]", ":", "parents_to_children", ":", "MutableMapping", "[", "type", ",", "Set", "[", "type", "]", "]", "=", "{", "}", "children_to_parents", ":", "MutableMapping", "[", "type", ",", "Set", "[", "type", "]", "]", "=", "{", "}", "visited_classes", ":", "Set", "[", "type", "]", "=", "set", "(", ")", "class_stack", "=", "list", "(", "classes", ")", "while", "class_stack", ":", "class_", "=", "class_stack", ".", "pop", "(", ")", "if", "class_", "in", "visited_classes", ":", "continue", "visited_classes", ".", "add", "(", "class_", ")", "for", "base", "in", "class_", ".", "__bases__", ":", "if", "base", "not", "in", "visited_classes", ":", "class_stack", ".", "append", "(", "base", ")", "parents_to_children", ".", "setdefault", "(", "base", ",", "set", "(", ")", ")", ".", "add", "(", "class_", ")", "children_to_parents", ".", "setdefault", "(", "class_", ",", "set", "(", ")", ")", ".", "add", "(", "base", ")", "sorted_parents_to_children", ":", "MutableMapping", "[", "type", ",", "List", "[", "type", "]", "]", "=", "collections", ".", "OrderedDict", "(", ")", "for", "parent", ",", "children", "in", "sorted", "(", "parents_to_children", ".", "items", "(", ")", ",", "key", "=", "lambda", "x", ":", "(", "x", "[", "0", "]", ".", "__module__", ",", "x", "[", "0", "]", ".", "__name__", ")", ")", ":", "sorted_parents_to_children", "[", "parent", "]", "=", "sorted", "(", "children", ",", "key", "=", "lambda", "x", ":", "(", "x", ".", "__module__", ",", "x", ".", "__name__", ")", ")", "sorted_children_to_parents", ":", "MutableMapping", "[", "type", ",", "List", "[", "type", "]", "]", "=", "collections", ".", "OrderedDict", "(", ")", "for", "child", ",", "parents", "in", "sorted", "(", "children_to_parents", ".", "items", "(", ")", 
",", "key", "=", "lambda", "x", ":", "(", "x", "[", "0", "]", ".", "__module__", ",", "x", "[", "0", "]", ".", "__name__", ")", ")", ":", "sorted_children_to_parents", "[", "child", "]", "=", "sorted", "(", "parents", ",", "key", "=", "lambda", "x", ":", "(", "x", ".", "__module__", ",", "x", ".", "__name__", ")", ")", "return", "sorted_parents_to_children", ",", "sorted_children_to_parents" ]
Collect all bases and organize into parent/child mappings.
[ "Collect", "all", "bases", "and", "organize", "into", "parent", "/", "child", "mappings", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/apis/InheritanceGraph.py#L347-L385
josiah-wolf-oberholtzer/uqbar
uqbar/apis/InheritanceGraph.py
InheritanceGraph._collect_classes
def _collect_classes( self, package_paths: Sequence[str], recurse_subpackages: bool = True ) -> Sequence[type]: """ Collect all classes defined in/under ``package_paths``. """ import uqbar.apis classes = [] initial_source_paths: Set[str] = set() # Graph source paths and classes for path in package_paths: try: module = importlib.import_module(path) if hasattr(module, "__path__"): initial_source_paths.update(getattr(module, "__path__")) else: initial_source_paths.add(module.__file__) except ModuleNotFoundError: path, _, class_name = path.rpartition(".") module = importlib.import_module(path) classes.append(getattr(module, class_name)) # Iterate source paths for source_path in uqbar.apis.collect_source_paths( initial_source_paths, recurse_subpackages=recurse_subpackages ): package_path = uqbar.apis.source_path_to_package_path(source_path) module = importlib.import_module(package_path) # Grab any defined classes for name in dir(module): if name.startswith("_"): continue object_ = getattr(module, name) if isinstance(object_, type) and object_.__module__ == module.__name__: classes.append(object_) return sorted(classes, key=lambda x: (x.__module__, x.__name__))
python
def _collect_classes( self, package_paths: Sequence[str], recurse_subpackages: bool = True ) -> Sequence[type]: """ Collect all classes defined in/under ``package_paths``. """ import uqbar.apis classes = [] initial_source_paths: Set[str] = set() # Graph source paths and classes for path in package_paths: try: module = importlib.import_module(path) if hasattr(module, "__path__"): initial_source_paths.update(getattr(module, "__path__")) else: initial_source_paths.add(module.__file__) except ModuleNotFoundError: path, _, class_name = path.rpartition(".") module = importlib.import_module(path) classes.append(getattr(module, class_name)) # Iterate source paths for source_path in uqbar.apis.collect_source_paths( initial_source_paths, recurse_subpackages=recurse_subpackages ): package_path = uqbar.apis.source_path_to_package_path(source_path) module = importlib.import_module(package_path) # Grab any defined classes for name in dir(module): if name.startswith("_"): continue object_ = getattr(module, name) if isinstance(object_, type) and object_.__module__ == module.__name__: classes.append(object_) return sorted(classes, key=lambda x: (x.__module__, x.__name__))
[ "def", "_collect_classes", "(", "self", ",", "package_paths", ":", "Sequence", "[", "str", "]", ",", "recurse_subpackages", ":", "bool", "=", "True", ")", "->", "Sequence", "[", "type", "]", ":", "import", "uqbar", ".", "apis", "classes", "=", "[", "]", "initial_source_paths", ":", "Set", "[", "str", "]", "=", "set", "(", ")", "# Graph source paths and classes", "for", "path", "in", "package_paths", ":", "try", ":", "module", "=", "importlib", ".", "import_module", "(", "path", ")", "if", "hasattr", "(", "module", ",", "\"__path__\"", ")", ":", "initial_source_paths", ".", "update", "(", "getattr", "(", "module", ",", "\"__path__\"", ")", ")", "else", ":", "initial_source_paths", ".", "add", "(", "module", ".", "__file__", ")", "except", "ModuleNotFoundError", ":", "path", ",", "_", ",", "class_name", "=", "path", ".", "rpartition", "(", "\".\"", ")", "module", "=", "importlib", ".", "import_module", "(", "path", ")", "classes", ".", "append", "(", "getattr", "(", "module", ",", "class_name", ")", ")", "# Iterate source paths", "for", "source_path", "in", "uqbar", ".", "apis", ".", "collect_source_paths", "(", "initial_source_paths", ",", "recurse_subpackages", "=", "recurse_subpackages", ")", ":", "package_path", "=", "uqbar", ".", "apis", ".", "source_path_to_package_path", "(", "source_path", ")", "module", "=", "importlib", ".", "import_module", "(", "package_path", ")", "# Grab any defined classes", "for", "name", "in", "dir", "(", "module", ")", ":", "if", "name", ".", "startswith", "(", "\"_\"", ")", ":", "continue", "object_", "=", "getattr", "(", "module", ",", "name", ")", "if", "isinstance", "(", "object_", ",", "type", ")", "and", "object_", ".", "__module__", "==", "module", ".", "__name__", ":", "classes", ".", "append", "(", "object_", ")", "return", "sorted", "(", "classes", ",", "key", "=", "lambda", "x", ":", "(", "x", ".", "__module__", ",", "x", ".", "__name__", ")", ")" ]
Collect all classes defined in/under ``package_paths``.
[ "Collect", "all", "classes", "defined", "in", "/", "under", "package_paths", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/apis/InheritanceGraph.py#L387-L422
quantmind/agile-toolkit
agiletoolkit/api/__init__.py
get_auth
def get_auth(): """Return a tuple for authenticating a user If not successful raise ``AgileError``. """ auth = get_auth_from_env() if auth[0] and auth[1]: return auth home = os.path.expanduser("~") config = os.path.join(home, '.gitconfig') if not os.path.isfile(config): raise GithubException('No .gitconfig available') parser = configparser.ConfigParser() parser.read(config) if 'user' in parser: user = parser['user'] if 'username' not in user: raise GithubException('Specify username in %s user ' 'section' % config) if 'token' not in user: raise GithubException('Specify token in %s user section' % config) return user['username'], user['token'] else: raise GithubException('No user section in %s' % config)
python
def get_auth(): """Return a tuple for authenticating a user If not successful raise ``AgileError``. """ auth = get_auth_from_env() if auth[0] and auth[1]: return auth home = os.path.expanduser("~") config = os.path.join(home, '.gitconfig') if not os.path.isfile(config): raise GithubException('No .gitconfig available') parser = configparser.ConfigParser() parser.read(config) if 'user' in parser: user = parser['user'] if 'username' not in user: raise GithubException('Specify username in %s user ' 'section' % config) if 'token' not in user: raise GithubException('Specify token in %s user section' % config) return user['username'], user['token'] else: raise GithubException('No user section in %s' % config)
[ "def", "get_auth", "(", ")", ":", "auth", "=", "get_auth_from_env", "(", ")", "if", "auth", "[", "0", "]", "and", "auth", "[", "1", "]", ":", "return", "auth", "home", "=", "os", ".", "path", ".", "expanduser", "(", "\"~\"", ")", "config", "=", "os", ".", "path", ".", "join", "(", "home", ",", "'.gitconfig'", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "config", ")", ":", "raise", "GithubException", "(", "'No .gitconfig available'", ")", "parser", "=", "configparser", ".", "ConfigParser", "(", ")", "parser", ".", "read", "(", "config", ")", "if", "'user'", "in", "parser", ":", "user", "=", "parser", "[", "'user'", "]", "if", "'username'", "not", "in", "user", ":", "raise", "GithubException", "(", "'Specify username in %s user '", "'section'", "%", "config", ")", "if", "'token'", "not", "in", "user", ":", "raise", "GithubException", "(", "'Specify token in %s user section'", "%", "config", ")", "return", "user", "[", "'username'", "]", ",", "user", "[", "'token'", "]", "else", ":", "raise", "GithubException", "(", "'No user section in %s'", "%", "config", ")" ]
Return a tuple for authenticating a user If not successful raise ``AgileError``.
[ "Return", "a", "tuple", "for", "authenticating", "a", "user" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/__init__.py#L40-L66
davidblaisonneau-orange/foreman
foreman/architectures.py
Architectures.checkAndCreate
def checkAndCreate(self, key, payload, osIds): """ Function checkAndCreate Check if an architectures exists and create it if not @param key: The targeted architectures @param payload: The targeted architectures description @param osIds: The list of os ids liked with this architecture @return RETURN: The id of the object """ if key not in self: self[key] = payload oid = self[key]['id'] if not oid: return False #~ To be sure the OS list is good, we ensure our os are in the list for os in self[key]['operatingsystems']: osIds.add(os['id']) self[key]["operatingsystem_ids"] = list(osIds) if (len(self[key]['operatingsystems']) is not len(osIds)): return False return oid
python
def checkAndCreate(self, key, payload, osIds): """ Function checkAndCreate Check if an architectures exists and create it if not @param key: The targeted architectures @param payload: The targeted architectures description @param osIds: The list of os ids liked with this architecture @return RETURN: The id of the object """ if key not in self: self[key] = payload oid = self[key]['id'] if not oid: return False #~ To be sure the OS list is good, we ensure our os are in the list for os in self[key]['operatingsystems']: osIds.add(os['id']) self[key]["operatingsystem_ids"] = list(osIds) if (len(self[key]['operatingsystems']) is not len(osIds)): return False return oid
[ "def", "checkAndCreate", "(", "self", ",", "key", ",", "payload", ",", "osIds", ")", ":", "if", "key", "not", "in", "self", ":", "self", "[", "key", "]", "=", "payload", "oid", "=", "self", "[", "key", "]", "[", "'id'", "]", "if", "not", "oid", ":", "return", "False", "#~ To be sure the OS list is good, we ensure our os are in the list", "for", "os", "in", "self", "[", "key", "]", "[", "'operatingsystems'", "]", ":", "osIds", ".", "add", "(", "os", "[", "'id'", "]", ")", "self", "[", "key", "]", "[", "\"operatingsystem_ids\"", "]", "=", "list", "(", "osIds", ")", "if", "(", "len", "(", "self", "[", "key", "]", "[", "'operatingsystems'", "]", ")", "is", "not", "len", "(", "osIds", ")", ")", ":", "return", "False", "return", "oid" ]
Function checkAndCreate Check if an architectures exists and create it if not @param key: The targeted architectures @param payload: The targeted architectures description @param osIds: The list of os ids liked with this architecture @return RETURN: The id of the object
[ "Function", "checkAndCreate", "Check", "if", "an", "architectures", "exists", "and", "create", "it", "if", "not" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/architectures.py#L29-L49
DataMedSci/mcpartools
setup.py
pip_command_output
def pip_command_output(pip_args): """ Get output (as a string) from pip command :param pip_args: list o pip switches to pass :return: string with results """ import sys import pip from io import StringIO # as pip will write to stdout we use some nasty hacks # to substitute system stdout with our own old_stdout = sys.stdout sys.stdout = mystdout = StringIO() pip.main(pip_args) output = mystdout.getvalue() mystdout.truncate(0) sys.stdout = old_stdout return output
python
def pip_command_output(pip_args): """ Get output (as a string) from pip command :param pip_args: list o pip switches to pass :return: string with results """ import sys import pip from io import StringIO # as pip will write to stdout we use some nasty hacks # to substitute system stdout with our own old_stdout = sys.stdout sys.stdout = mystdout = StringIO() pip.main(pip_args) output = mystdout.getvalue() mystdout.truncate(0) sys.stdout = old_stdout return output
[ "def", "pip_command_output", "(", "pip_args", ")", ":", "import", "sys", "import", "pip", "from", "io", "import", "StringIO", "# as pip will write to stdout we use some nasty hacks", "# to substitute system stdout with our own", "old_stdout", "=", "sys", ".", "stdout", "sys", ".", "stdout", "=", "mystdout", "=", "StringIO", "(", ")", "pip", ".", "main", "(", "pip_args", ")", "output", "=", "mystdout", ".", "getvalue", "(", ")", "mystdout", ".", "truncate", "(", "0", ")", "sys", ".", "stdout", "=", "old_stdout", "return", "output" ]
Get output (as a string) from pip command :param pip_args: list o pip switches to pass :return: string with results
[ "Get", "output", "(", "as", "a", "string", ")", "from", "pip", "command", ":", "param", "pip_args", ":", "list", "o", "pip", "switches", "to", "pass", ":", "return", ":", "string", "with", "results" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/setup.py#L5-L22
DataMedSci/mcpartools
setup.py
setup_versioneer
def setup_versioneer(): """ Generate (temporarily) versioneer.py file in project root directory :return: """ try: # assume versioneer.py was generated using "versioneer install" command import versioneer versioneer.get_version() except ImportError: # it looks versioneer.py is missing # lets assume that versioneer package is installed # and versioneer binary is present in $PATH import subprocess try: # call versioneer install to generate versioneer.py subprocess.check_output(["versioneer", "install"]) except OSError: # it looks versioneer is missing from $PATH # probably versioneer is installed in some user directory # query pip for list of files in versioneer package # line below is equivalen to putting result of # "pip show -f versioneer" command to string output output = pip_command_output(["show", "-f", "versioneer"]) # now we parse the results import os # find absolute path where *versioneer package* was installed # and store it in main_path main_path = [x[len("Location: "):] for x in output.splitlines() if x.startswith("Location")][0] # find path relative to main_path where # *versioneer binary* was installed bin_path = [x[len(" "):] for x in output.splitlines() if x.endswith(os.path.sep + "versioneer")][0] # exe_path is absolute path to *versioneer binary* exe_path = os.path.join(main_path, bin_path) # call versioneer install to generate versioneer.py # line below is equivalent to running in terminal # "python versioneer install" subprocess.check_output(["python", exe_path, "install"])
python
def setup_versioneer(): """ Generate (temporarily) versioneer.py file in project root directory :return: """ try: # assume versioneer.py was generated using "versioneer install" command import versioneer versioneer.get_version() except ImportError: # it looks versioneer.py is missing # lets assume that versioneer package is installed # and versioneer binary is present in $PATH import subprocess try: # call versioneer install to generate versioneer.py subprocess.check_output(["versioneer", "install"]) except OSError: # it looks versioneer is missing from $PATH # probably versioneer is installed in some user directory # query pip for list of files in versioneer package # line below is equivalen to putting result of # "pip show -f versioneer" command to string output output = pip_command_output(["show", "-f", "versioneer"]) # now we parse the results import os # find absolute path where *versioneer package* was installed # and store it in main_path main_path = [x[len("Location: "):] for x in output.splitlines() if x.startswith("Location")][0] # find path relative to main_path where # *versioneer binary* was installed bin_path = [x[len(" "):] for x in output.splitlines() if x.endswith(os.path.sep + "versioneer")][0] # exe_path is absolute path to *versioneer binary* exe_path = os.path.join(main_path, bin_path) # call versioneer install to generate versioneer.py # line below is equivalent to running in terminal # "python versioneer install" subprocess.check_output(["python", exe_path, "install"])
[ "def", "setup_versioneer", "(", ")", ":", "try", ":", "# assume versioneer.py was generated using \"versioneer install\" command", "import", "versioneer", "versioneer", ".", "get_version", "(", ")", "except", "ImportError", ":", "# it looks versioneer.py is missing", "# lets assume that versioneer package is installed", "# and versioneer binary is present in $PATH", "import", "subprocess", "try", ":", "# call versioneer install to generate versioneer.py", "subprocess", ".", "check_output", "(", "[", "\"versioneer\"", ",", "\"install\"", "]", ")", "except", "OSError", ":", "# it looks versioneer is missing from $PATH", "# probably versioneer is installed in some user directory", "# query pip for list of files in versioneer package", "# line below is equivalen to putting result of", "# \"pip show -f versioneer\" command to string output", "output", "=", "pip_command_output", "(", "[", "\"show\"", ",", "\"-f\"", ",", "\"versioneer\"", "]", ")", "# now we parse the results", "import", "os", "# find absolute path where *versioneer package* was installed", "# and store it in main_path", "main_path", "=", "[", "x", "[", "len", "(", "\"Location: \"", ")", ":", "]", "for", "x", "in", "output", ".", "splitlines", "(", ")", "if", "x", ".", "startswith", "(", "\"Location\"", ")", "]", "[", "0", "]", "# find path relative to main_path where", "# *versioneer binary* was installed", "bin_path", "=", "[", "x", "[", "len", "(", "\" \"", ")", ":", "]", "for", "x", "in", "output", ".", "splitlines", "(", ")", "if", "x", ".", "endswith", "(", "os", ".", "path", ".", "sep", "+", "\"versioneer\"", ")", "]", "[", "0", "]", "# exe_path is absolute path to *versioneer binary*", "exe_path", "=", "os", ".", "path", ".", "join", "(", "main_path", ",", "bin_path", ")", "# call versioneer install to generate versioneer.py", "# line below is equivalent to running in terminal", "# \"python versioneer install\"", "subprocess", ".", "check_output", "(", "[", "\"python\"", ",", "exe_path", ",", 
"\"install\"", "]", ")" ]
Generate (temporarily) versioneer.py file in project root directory :return:
[ "Generate", "(", "temporarily", ")", "versioneer", ".", "py", "file", "in", "project", "root", "directory", ":", "return", ":" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/setup.py#L25-L67
DataMedSci/mcpartools
setup.py
clean_cache
def clean_cache(): """ Python won't realise that new module has appeared in the runtime We need to clean the cache of module finders. Hacking again :return: """ import importlib try: # Python ver < 3.3 vermod = importlib.import_module("versioneer") globals()["versioneer"] = vermod except ImportError: importlib.invalidate_caches()
python
def clean_cache(): """ Python won't realise that new module has appeared in the runtime We need to clean the cache of module finders. Hacking again :return: """ import importlib try: # Python ver < 3.3 vermod = importlib.import_module("versioneer") globals()["versioneer"] = vermod except ImportError: importlib.invalidate_caches()
[ "def", "clean_cache", "(", ")", ":", "import", "importlib", "try", ":", "# Python ver < 3.3", "vermod", "=", "importlib", ".", "import_module", "(", "\"versioneer\"", ")", "globals", "(", ")", "[", "\"versioneer\"", "]", "=", "vermod", "except", "ImportError", ":", "importlib", ".", "invalidate_caches", "(", ")" ]
Python won't realise that new module has appeared in the runtime We need to clean the cache of module finders. Hacking again :return:
[ "Python", "won", "t", "realise", "that", "new", "module", "has", "appeared", "in", "the", "runtime", "We", "need", "to", "clean", "the", "cache", "of", "module", "finders", ".", "Hacking", "again", ":", "return", ":" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/setup.py#L70-L81
DataMedSci/mcpartools
setup.py
get_version
def get_version(): """ Get project version (using versioneer) :return: string containing version """ setup_versioneer() clean_cache() import versioneer version = versioneer.get_version() parsed_version = parse_version(version) if '*@' in str(parsed_version): import time version += str(int(time.time())) return version
python
def get_version(): """ Get project version (using versioneer) :return: string containing version """ setup_versioneer() clean_cache() import versioneer version = versioneer.get_version() parsed_version = parse_version(version) if '*@' in str(parsed_version): import time version += str(int(time.time())) return version
[ "def", "get_version", "(", ")", ":", "setup_versioneer", "(", ")", "clean_cache", "(", ")", "import", "versioneer", "version", "=", "versioneer", ".", "get_version", "(", ")", "parsed_version", "=", "parse_version", "(", "version", ")", "if", "'*@'", "in", "str", "(", "parsed_version", ")", ":", "import", "time", "version", "+=", "str", "(", "int", "(", "time", ".", "time", "(", ")", ")", ")", "return", "version" ]
Get project version (using versioneer) :return: string containing version
[ "Get", "project", "version", "(", "using", "versioneer", ")", ":", "return", ":", "string", "containing", "version" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/setup.py#L84-L97
josiah-wolf-oberholtzer/uqbar
uqbar/io/__init__.py
find_common_prefix
def find_common_prefix( paths: Sequence[Union[str, pathlib.Path]] ) -> Optional[pathlib.Path]: """ Find the common prefix of two or more paths. :: >>> import pathlib >>> one = pathlib.Path('foo/bar/baz') >>> two = pathlib.Path('foo/quux/biz') >>> three = pathlib.Path('foo/quux/wuux') :: >>> import uqbar.io >>> str(uqbar.io.find_common_prefix([one, two, three])) 'foo' :param paths: paths to inspect """ counter: collections.Counter = collections.Counter() for path in paths: path = pathlib.Path(path) counter.update([path]) counter.update(path.parents) valid_paths = sorted( [path for path, count in counter.items() if count >= len(paths)], key=lambda x: len(x.parts), ) if valid_paths: return valid_paths[-1] return None
python
def find_common_prefix( paths: Sequence[Union[str, pathlib.Path]] ) -> Optional[pathlib.Path]: """ Find the common prefix of two or more paths. :: >>> import pathlib >>> one = pathlib.Path('foo/bar/baz') >>> two = pathlib.Path('foo/quux/biz') >>> three = pathlib.Path('foo/quux/wuux') :: >>> import uqbar.io >>> str(uqbar.io.find_common_prefix([one, two, three])) 'foo' :param paths: paths to inspect """ counter: collections.Counter = collections.Counter() for path in paths: path = pathlib.Path(path) counter.update([path]) counter.update(path.parents) valid_paths = sorted( [path for path, count in counter.items() if count >= len(paths)], key=lambda x: len(x.parts), ) if valid_paths: return valid_paths[-1] return None
[ "def", "find_common_prefix", "(", "paths", ":", "Sequence", "[", "Union", "[", "str", ",", "pathlib", ".", "Path", "]", "]", ")", "->", "Optional", "[", "pathlib", ".", "Path", "]", ":", "counter", ":", "collections", ".", "Counter", "=", "collections", ".", "Counter", "(", ")", "for", "path", "in", "paths", ":", "path", "=", "pathlib", ".", "Path", "(", "path", ")", "counter", ".", "update", "(", "[", "path", "]", ")", "counter", ".", "update", "(", "path", ".", "parents", ")", "valid_paths", "=", "sorted", "(", "[", "path", "for", "path", ",", "count", "in", "counter", ".", "items", "(", ")", "if", "count", ">=", "len", "(", "paths", ")", "]", ",", "key", "=", "lambda", "x", ":", "len", "(", "x", ".", "parts", ")", ",", ")", "if", "valid_paths", ":", "return", "valid_paths", "[", "-", "1", "]", "return", "None" ]
Find the common prefix of two or more paths. :: >>> import pathlib >>> one = pathlib.Path('foo/bar/baz') >>> two = pathlib.Path('foo/quux/biz') >>> three = pathlib.Path('foo/quux/wuux') :: >>> import uqbar.io >>> str(uqbar.io.find_common_prefix([one, two, three])) 'foo' :param paths: paths to inspect
[ "Find", "the", "common", "prefix", "of", "two", "or", "more", "paths", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/io/__init__.py#L16-L48
josiah-wolf-oberholtzer/uqbar
uqbar/io/__init__.py
find_executable
def find_executable(name: str, flags=os.X_OK) -> List[str]: r"""Finds executable `name`. Similar to Unix ``which`` command. Returns list of zero or more full paths to `name`. """ result = [] extensions = [x for x in os.environ.get("PATHEXT", "").split(os.pathsep) if x] path = os.environ.get("PATH", None) if path is None: return [] for path in os.environ.get("PATH", "").split(os.pathsep): path = os.path.join(path, name) if os.access(path, flags): result.append(path) for extension in extensions: path_extension = path + extension if os.access(path_extension, flags): result.append(path_extension) return result
python
def find_executable(name: str, flags=os.X_OK) -> List[str]: r"""Finds executable `name`. Similar to Unix ``which`` command. Returns list of zero or more full paths to `name`. """ result = [] extensions = [x for x in os.environ.get("PATHEXT", "").split(os.pathsep) if x] path = os.environ.get("PATH", None) if path is None: return [] for path in os.environ.get("PATH", "").split(os.pathsep): path = os.path.join(path, name) if os.access(path, flags): result.append(path) for extension in extensions: path_extension = path + extension if os.access(path_extension, flags): result.append(path_extension) return result
[ "def", "find_executable", "(", "name", ":", "str", ",", "flags", "=", "os", ".", "X_OK", ")", "->", "List", "[", "str", "]", ":", "result", "=", "[", "]", "extensions", "=", "[", "x", "for", "x", "in", "os", ".", "environ", ".", "get", "(", "\"PATHEXT\"", ",", "\"\"", ")", ".", "split", "(", "os", ".", "pathsep", ")", "if", "x", "]", "path", "=", "os", ".", "environ", ".", "get", "(", "\"PATH\"", ",", "None", ")", "if", "path", "is", "None", ":", "return", "[", "]", "for", "path", "in", "os", ".", "environ", ".", "get", "(", "\"PATH\"", ",", "\"\"", ")", ".", "split", "(", "os", ".", "pathsep", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "name", ")", "if", "os", ".", "access", "(", "path", ",", "flags", ")", ":", "result", ".", "append", "(", "path", ")", "for", "extension", "in", "extensions", ":", "path_extension", "=", "path", "+", "extension", "if", "os", ".", "access", "(", "path_extension", ",", "flags", ")", ":", "result", ".", "append", "(", "path_extension", ")", "return", "result" ]
r"""Finds executable `name`. Similar to Unix ``which`` command. Returns list of zero or more full paths to `name`.
[ "r", "Finds", "executable", "name", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/io/__init__.py#L51-L71
josiah-wolf-oberholtzer/uqbar
uqbar/io/__init__.py
relative_to
def relative_to( source_path: Union[str, pathlib.Path], target_path: Union[str, pathlib.Path] ) -> pathlib.Path: """ Generates relative path from ``source_path`` to ``target_path``. Handles the case of paths without a common prefix. :: >>> import pathlib >>> source = pathlib.Path('foo/bar/baz') >>> target = pathlib.Path('foo/quux/biz') :: >>> target.relative_to(source) Traceback (most recent call last): ... ValueError: 'foo/quux/biz' does not start with 'foo/bar/baz' :: >>> import uqbar.io >>> str(uqbar.io.relative_to(source, target)) '../../quux/biz' :param source_path: the source path :param target_path: the target path """ source_path = pathlib.Path(source_path).absolute() if source_path.is_file(): source_path = source_path.parent target_path = pathlib.Path(target_path).absolute() common_prefix = find_common_prefix([source_path, target_path]) if not common_prefix: raise ValueError("No common prefix") source_path = source_path.relative_to(common_prefix) target_path = target_path.relative_to(common_prefix) result = pathlib.Path(*[".."] * len(source_path.parts)) return result / target_path
python
def relative_to( source_path: Union[str, pathlib.Path], target_path: Union[str, pathlib.Path] ) -> pathlib.Path: """ Generates relative path from ``source_path`` to ``target_path``. Handles the case of paths without a common prefix. :: >>> import pathlib >>> source = pathlib.Path('foo/bar/baz') >>> target = pathlib.Path('foo/quux/biz') :: >>> target.relative_to(source) Traceback (most recent call last): ... ValueError: 'foo/quux/biz' does not start with 'foo/bar/baz' :: >>> import uqbar.io >>> str(uqbar.io.relative_to(source, target)) '../../quux/biz' :param source_path: the source path :param target_path: the target path """ source_path = pathlib.Path(source_path).absolute() if source_path.is_file(): source_path = source_path.parent target_path = pathlib.Path(target_path).absolute() common_prefix = find_common_prefix([source_path, target_path]) if not common_prefix: raise ValueError("No common prefix") source_path = source_path.relative_to(common_prefix) target_path = target_path.relative_to(common_prefix) result = pathlib.Path(*[".."] * len(source_path.parts)) return result / target_path
[ "def", "relative_to", "(", "source_path", ":", "Union", "[", "str", ",", "pathlib", ".", "Path", "]", ",", "target_path", ":", "Union", "[", "str", ",", "pathlib", ".", "Path", "]", ")", "->", "pathlib", ".", "Path", ":", "source_path", "=", "pathlib", ".", "Path", "(", "source_path", ")", ".", "absolute", "(", ")", "if", "source_path", ".", "is_file", "(", ")", ":", "source_path", "=", "source_path", ".", "parent", "target_path", "=", "pathlib", ".", "Path", "(", "target_path", ")", ".", "absolute", "(", ")", "common_prefix", "=", "find_common_prefix", "(", "[", "source_path", ",", "target_path", "]", ")", "if", "not", "common_prefix", ":", "raise", "ValueError", "(", "\"No common prefix\"", ")", "source_path", "=", "source_path", ".", "relative_to", "(", "common_prefix", ")", "target_path", "=", "target_path", ".", "relative_to", "(", "common_prefix", ")", "result", "=", "pathlib", ".", "Path", "(", "*", "[", "\"..\"", "]", "*", "len", "(", "source_path", ".", "parts", ")", ")", "return", "result", "/", "target_path" ]
Generates relative path from ``source_path`` to ``target_path``. Handles the case of paths without a common prefix. :: >>> import pathlib >>> source = pathlib.Path('foo/bar/baz') >>> target = pathlib.Path('foo/quux/biz') :: >>> target.relative_to(source) Traceback (most recent call last): ... ValueError: 'foo/quux/biz' does not start with 'foo/bar/baz' :: >>> import uqbar.io >>> str(uqbar.io.relative_to(source, target)) '../../quux/biz' :param source_path: the source path :param target_path: the target path
[ "Generates", "relative", "path", "from", "source_path", "to", "target_path", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/io/__init__.py#L74-L114
josiah-wolf-oberholtzer/uqbar
uqbar/io/__init__.py
walk
def walk( root_path: Union[str, pathlib.Path], top_down: bool = True ) -> Generator[ Tuple[pathlib.Path, Sequence[pathlib.Path], Sequence[pathlib.Path]], None, None ]: """ Walks a directory tree. Like :py:func:`os.walk` but yielding instances of :py:class:`pathlib.Path` instead of strings. :param root_path: foo :param top_down: bar """ root_path = pathlib.Path(root_path) directory_paths, file_paths = [], [] for path in sorted(root_path.iterdir()): if path.is_dir(): directory_paths.append(path) else: file_paths.append(path) if top_down: yield root_path, directory_paths, file_paths for directory_path in directory_paths: yield from walk(directory_path, top_down=top_down) if not top_down: yield root_path, directory_paths, file_paths
python
def walk( root_path: Union[str, pathlib.Path], top_down: bool = True ) -> Generator[ Tuple[pathlib.Path, Sequence[pathlib.Path], Sequence[pathlib.Path]], None, None ]: """ Walks a directory tree. Like :py:func:`os.walk` but yielding instances of :py:class:`pathlib.Path` instead of strings. :param root_path: foo :param top_down: bar """ root_path = pathlib.Path(root_path) directory_paths, file_paths = [], [] for path in sorted(root_path.iterdir()): if path.is_dir(): directory_paths.append(path) else: file_paths.append(path) if top_down: yield root_path, directory_paths, file_paths for directory_path in directory_paths: yield from walk(directory_path, top_down=top_down) if not top_down: yield root_path, directory_paths, file_paths
[ "def", "walk", "(", "root_path", ":", "Union", "[", "str", ",", "pathlib", ".", "Path", "]", ",", "top_down", ":", "bool", "=", "True", ")", "->", "Generator", "[", "Tuple", "[", "pathlib", ".", "Path", ",", "Sequence", "[", "pathlib", ".", "Path", "]", ",", "Sequence", "[", "pathlib", ".", "Path", "]", "]", ",", "None", ",", "None", "]", ":", "root_path", "=", "pathlib", ".", "Path", "(", "root_path", ")", "directory_paths", ",", "file_paths", "=", "[", "]", ",", "[", "]", "for", "path", "in", "sorted", "(", "root_path", ".", "iterdir", "(", ")", ")", ":", "if", "path", ".", "is_dir", "(", ")", ":", "directory_paths", ".", "append", "(", "path", ")", "else", ":", "file_paths", ".", "append", "(", "path", ")", "if", "top_down", ":", "yield", "root_path", ",", "directory_paths", ",", "file_paths", "for", "directory_path", "in", "directory_paths", ":", "yield", "from", "walk", "(", "directory_path", ",", "top_down", "=", "top_down", ")", "if", "not", "top_down", ":", "yield", "root_path", ",", "directory_paths", ",", "file_paths" ]
Walks a directory tree. Like :py:func:`os.walk` but yielding instances of :py:class:`pathlib.Path` instead of strings. :param root_path: foo :param top_down: bar
[ "Walks", "a", "directory", "tree", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/io/__init__.py#L117-L143
josiah-wolf-oberholtzer/uqbar
uqbar/io/__init__.py
write
def write( contents: str, path: Union[str, pathlib.Path], verbose: bool = False, logger_func=None, ) -> bool: """ Writes ``contents`` to ``path``. Checks if ``path`` already exists and only write out new contents if the old contents do not match. Creates any intermediate missing directories. :param contents: the file contents to write :param path: the path to write to :param verbose: whether to print output """ print_func = logger_func or print path = pathlib.Path(path) if path.exists(): with path.open("r") as file_pointer: old_contents = file_pointer.read() if old_contents == contents: if verbose: print_func("preserved {}".format(path)) return False else: with path.open("w") as file_pointer: file_pointer.write(contents) if verbose: print_func("rewrote {}".format(path)) return True elif not path.exists(): if not path.parent.exists(): path.parent.mkdir(parents=True) with path.open("w") as file_pointer: file_pointer.write(contents) if verbose: print_func("wrote {}".format(path)) return True
python
def write( contents: str, path: Union[str, pathlib.Path], verbose: bool = False, logger_func=None, ) -> bool: """ Writes ``contents`` to ``path``. Checks if ``path`` already exists and only write out new contents if the old contents do not match. Creates any intermediate missing directories. :param contents: the file contents to write :param path: the path to write to :param verbose: whether to print output """ print_func = logger_func or print path = pathlib.Path(path) if path.exists(): with path.open("r") as file_pointer: old_contents = file_pointer.read() if old_contents == contents: if verbose: print_func("preserved {}".format(path)) return False else: with path.open("w") as file_pointer: file_pointer.write(contents) if verbose: print_func("rewrote {}".format(path)) return True elif not path.exists(): if not path.parent.exists(): path.parent.mkdir(parents=True) with path.open("w") as file_pointer: file_pointer.write(contents) if verbose: print_func("wrote {}".format(path)) return True
[ "def", "write", "(", "contents", ":", "str", ",", "path", ":", "Union", "[", "str", ",", "pathlib", ".", "Path", "]", ",", "verbose", ":", "bool", "=", "False", ",", "logger_func", "=", "None", ",", ")", "->", "bool", ":", "print_func", "=", "logger_func", "or", "print", "path", "=", "pathlib", ".", "Path", "(", "path", ")", "if", "path", ".", "exists", "(", ")", ":", "with", "path", ".", "open", "(", "\"r\"", ")", "as", "file_pointer", ":", "old_contents", "=", "file_pointer", ".", "read", "(", ")", "if", "old_contents", "==", "contents", ":", "if", "verbose", ":", "print_func", "(", "\"preserved {}\"", ".", "format", "(", "path", ")", ")", "return", "False", "else", ":", "with", "path", ".", "open", "(", "\"w\"", ")", "as", "file_pointer", ":", "file_pointer", ".", "write", "(", "contents", ")", "if", "verbose", ":", "print_func", "(", "\"rewrote {}\"", ".", "format", "(", "path", ")", ")", "return", "True", "elif", "not", "path", ".", "exists", "(", ")", ":", "if", "not", "path", ".", "parent", ".", "exists", "(", ")", ":", "path", ".", "parent", ".", "mkdir", "(", "parents", "=", "True", ")", "with", "path", ".", "open", "(", "\"w\"", ")", "as", "file_pointer", ":", "file_pointer", ".", "write", "(", "contents", ")", "if", "verbose", ":", "print_func", "(", "\"wrote {}\"", ".", "format", "(", "path", ")", ")", "return", "True" ]
Writes ``contents`` to ``path``. Checks if ``path`` already exists and only write out new contents if the old contents do not match. Creates any intermediate missing directories. :param contents: the file contents to write :param path: the path to write to :param verbose: whether to print output
[ "Writes", "contents", "to", "path", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/io/__init__.py#L146-L186
MatterMiners/cobald
cobald/daemon/debug.py
pretty_ref
def pretty_ref(obj: Any) -> str: """Pretty object reference using ``module.path:qual.name`` format""" try: return obj.__module__ + ':' + obj.__qualname__ except AttributeError: return pretty_ref(type(obj)) + '(...)'
python
def pretty_ref(obj: Any) -> str: """Pretty object reference using ``module.path:qual.name`` format""" try: return obj.__module__ + ':' + obj.__qualname__ except AttributeError: return pretty_ref(type(obj)) + '(...)'
[ "def", "pretty_ref", "(", "obj", ":", "Any", ")", "->", "str", ":", "try", ":", "return", "obj", ".", "__module__", "+", "':'", "+", "obj", ".", "__qualname__", "except", "AttributeError", ":", "return", "pretty_ref", "(", "type", "(", "obj", ")", ")", "+", "'(...)'" ]
Pretty object reference using ``module.path:qual.name`` format
[ "Pretty", "object", "reference", "using", "module", ".", "path", ":", "qual", ".", "name", "format" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/debug.py#L8-L13
quantmind/agile-toolkit
agiletoolkit/github/remote.py
remote
def remote(ctx): """Display repo github path """ with command(): m = RepoManager(ctx.obj['agile']) click.echo(m.github_repo().repo_path)
python
def remote(ctx): """Display repo github path """ with command(): m = RepoManager(ctx.obj['agile']) click.echo(m.github_repo().repo_path)
[ "def", "remote", "(", "ctx", ")", ":", "with", "command", "(", ")", ":", "m", "=", "RepoManager", "(", "ctx", ".", "obj", "[", "'agile'", "]", ")", "click", ".", "echo", "(", "m", ".", "github_repo", "(", ")", ".", "repo_path", ")" ]
Display repo github path
[ "Display", "repo", "github", "path" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/github/remote.py#L9-L14
josiah-wolf-oberholtzer/uqbar
uqbar/containers/UniqueTreeNode.py
UniqueTreeNode.graph_order
def graph_order(self): """ Get graph-order tuple for node. :: >>> from uqbar.containers import UniqueTreeContainer, UniqueTreeNode >>> root_container = UniqueTreeContainer(name="root") >>> outer_container = UniqueTreeContainer(name="outer") >>> inner_container = UniqueTreeContainer(name="inner") >>> node_a = UniqueTreeNode(name="a") >>> node_b = UniqueTreeNode(name="b") >>> node_c = UniqueTreeNode(name="c") >>> node_d = UniqueTreeNode(name="d") >>> root_container.extend([node_a, outer_container]) >>> outer_container.extend([inner_container, node_d]) >>> inner_container.extend([node_b, node_c]) :: >>> for node in root_container.depth_first(): ... print(node.name, node.graph_order) ... a (0,) outer (1,) inner (1, 0) b (1, 0, 0) c (1, 0, 1) d (1, 1) """ parentage = tuple(reversed(self.parentage)) graph_order = [] for i in range(len(parentage) - 1): parent, child = parentage[i : i + 2] graph_order.append(parent.index(child)) return tuple(graph_order)
python
def graph_order(self): """ Get graph-order tuple for node. :: >>> from uqbar.containers import UniqueTreeContainer, UniqueTreeNode >>> root_container = UniqueTreeContainer(name="root") >>> outer_container = UniqueTreeContainer(name="outer") >>> inner_container = UniqueTreeContainer(name="inner") >>> node_a = UniqueTreeNode(name="a") >>> node_b = UniqueTreeNode(name="b") >>> node_c = UniqueTreeNode(name="c") >>> node_d = UniqueTreeNode(name="d") >>> root_container.extend([node_a, outer_container]) >>> outer_container.extend([inner_container, node_d]) >>> inner_container.extend([node_b, node_c]) :: >>> for node in root_container.depth_first(): ... print(node.name, node.graph_order) ... a (0,) outer (1,) inner (1, 0) b (1, 0, 0) c (1, 0, 1) d (1, 1) """ parentage = tuple(reversed(self.parentage)) graph_order = [] for i in range(len(parentage) - 1): parent, child = parentage[i : i + 2] graph_order.append(parent.index(child)) return tuple(graph_order)
[ "def", "graph_order", "(", "self", ")", ":", "parentage", "=", "tuple", "(", "reversed", "(", "self", ".", "parentage", ")", ")", "graph_order", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "parentage", ")", "-", "1", ")", ":", "parent", ",", "child", "=", "parentage", "[", "i", ":", "i", "+", "2", "]", "graph_order", ".", "append", "(", "parent", ".", "index", "(", "child", ")", ")", "return", "tuple", "(", "graph_order", ")" ]
Get graph-order tuple for node. :: >>> from uqbar.containers import UniqueTreeContainer, UniqueTreeNode >>> root_container = UniqueTreeContainer(name="root") >>> outer_container = UniqueTreeContainer(name="outer") >>> inner_container = UniqueTreeContainer(name="inner") >>> node_a = UniqueTreeNode(name="a") >>> node_b = UniqueTreeNode(name="b") >>> node_c = UniqueTreeNode(name="c") >>> node_d = UniqueTreeNode(name="d") >>> root_container.extend([node_a, outer_container]) >>> outer_container.extend([inner_container, node_d]) >>> inner_container.extend([node_b, node_c]) :: >>> for node in root_container.depth_first(): ... print(node.name, node.graph_order) ... a (0,) outer (1,) inner (1, 0) b (1, 0, 0) c (1, 0, 1) d (1, 1)
[ "Get", "graph", "-", "order", "tuple", "for", "node", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/containers/UniqueTreeNode.py#L118-L154
xenadevel/PyXenaManager
xenamanager/api/XenaSocket.py
XenaSocket.sendQuery
def sendQuery(self, cmd, multilines=False): """ Send command, wait for response (single or multi lines), test for errors and return the returned code. :param cmd: command to send :param multilines: True - multiline response, False - single line response. :return: command return value. """ self.logger.debug("sendQuery(%s)", cmd) if not self.is_connected(): raise socket.error("sendQuery on a disconnected socket") if multilines: replies = self.__sendQueryReplies(cmd) for reply in replies: if reply.startswith(XenaSocket.reply_errors): raise XenaCommandException('sendQuery({}) reply({})'.format(cmd, replies)) self.logger.debug("sendQuery(%s) -- Begin", cmd) for l in replies: self.logger.debug("%s", l.strip()) self.logger.debug("sendQuery(%s) -- End", cmd) return replies else: reply = self.__sendQueryReply(cmd) if reply.startswith(XenaSocket.reply_errors): raise XenaCommandException('sendQuery({}) reply({})'.format(cmd, reply)) self.logger.debug('sendQuery(%s) reply(%s)', cmd, reply) return reply
python
def sendQuery(self, cmd, multilines=False): """ Send command, wait for response (single or multi lines), test for errors and return the returned code. :param cmd: command to send :param multilines: True - multiline response, False - single line response. :return: command return value. """ self.logger.debug("sendQuery(%s)", cmd) if not self.is_connected(): raise socket.error("sendQuery on a disconnected socket") if multilines: replies = self.__sendQueryReplies(cmd) for reply in replies: if reply.startswith(XenaSocket.reply_errors): raise XenaCommandException('sendQuery({}) reply({})'.format(cmd, replies)) self.logger.debug("sendQuery(%s) -- Begin", cmd) for l in replies: self.logger.debug("%s", l.strip()) self.logger.debug("sendQuery(%s) -- End", cmd) return replies else: reply = self.__sendQueryReply(cmd) if reply.startswith(XenaSocket.reply_errors): raise XenaCommandException('sendQuery({}) reply({})'.format(cmd, reply)) self.logger.debug('sendQuery(%s) reply(%s)', cmd, reply) return reply
[ "def", "sendQuery", "(", "self", ",", "cmd", ",", "multilines", "=", "False", ")", ":", "self", ".", "logger", ".", "debug", "(", "\"sendQuery(%s)\"", ",", "cmd", ")", "if", "not", "self", ".", "is_connected", "(", ")", ":", "raise", "socket", ".", "error", "(", "\"sendQuery on a disconnected socket\"", ")", "if", "multilines", ":", "replies", "=", "self", ".", "__sendQueryReplies", "(", "cmd", ")", "for", "reply", "in", "replies", ":", "if", "reply", ".", "startswith", "(", "XenaSocket", ".", "reply_errors", ")", ":", "raise", "XenaCommandException", "(", "'sendQuery({}) reply({})'", ".", "format", "(", "cmd", ",", "replies", ")", ")", "self", ".", "logger", ".", "debug", "(", "\"sendQuery(%s) -- Begin\"", ",", "cmd", ")", "for", "l", "in", "replies", ":", "self", ".", "logger", ".", "debug", "(", "\"%s\"", ",", "l", ".", "strip", "(", ")", ")", "self", ".", "logger", ".", "debug", "(", "\"sendQuery(%s) -- End\"", ",", "cmd", ")", "return", "replies", "else", ":", "reply", "=", "self", ".", "__sendQueryReply", "(", "cmd", ")", "if", "reply", ".", "startswith", "(", "XenaSocket", ".", "reply_errors", ")", ":", "raise", "XenaCommandException", "(", "'sendQuery({}) reply({})'", ".", "format", "(", "cmd", ",", "reply", ")", ")", "self", ".", "logger", ".", "debug", "(", "'sendQuery(%s) reply(%s)'", ",", "cmd", ",", "reply", ")", "return", "reply" ]
Send command, wait for response (single or multi lines), test for errors and return the returned code. :param cmd: command to send :param multilines: True - multiline response, False - single line response. :return: command return value.
[ "Send", "command", "wait", "for", "response", "(", "single", "or", "multi", "lines", ")", "test", "for", "errors", "and", "return", "the", "returned", "code", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/api/XenaSocket.py#L97-L123
xenadevel/PyXenaManager
xenamanager/api/XenaSocket.py
XenaSocket.sendQueryVerify
def sendQueryVerify(self, cmd): """ Send command without return value, wait for completion, verify success. :param cmd: command to send """ cmd = cmd.strip() self.logger.debug("sendQueryVerify(%s)", cmd) if not self.is_connected(): raise socket.error("sendQueryVerify on a disconnected socket") resp = self.__sendQueryReply(cmd) if resp != self.reply_ok: raise XenaCommandException('Command {} Fail Expected {} Actual {}'.format(cmd, self.reply_ok, resp)) self.logger.debug("SendQueryVerify(%s) Succeed", cmd)
python
def sendQueryVerify(self, cmd): """ Send command without return value, wait for completion, verify success. :param cmd: command to send """ cmd = cmd.strip() self.logger.debug("sendQueryVerify(%s)", cmd) if not self.is_connected(): raise socket.error("sendQueryVerify on a disconnected socket") resp = self.__sendQueryReply(cmd) if resp != self.reply_ok: raise XenaCommandException('Command {} Fail Expected {} Actual {}'.format(cmd, self.reply_ok, resp)) self.logger.debug("SendQueryVerify(%s) Succeed", cmd)
[ "def", "sendQueryVerify", "(", "self", ",", "cmd", ")", ":", "cmd", "=", "cmd", ".", "strip", "(", ")", "self", ".", "logger", ".", "debug", "(", "\"sendQueryVerify(%s)\"", ",", "cmd", ")", "if", "not", "self", ".", "is_connected", "(", ")", ":", "raise", "socket", ".", "error", "(", "\"sendQueryVerify on a disconnected socket\"", ")", "resp", "=", "self", ".", "__sendQueryReply", "(", "cmd", ")", "if", "resp", "!=", "self", ".", "reply_ok", ":", "raise", "XenaCommandException", "(", "'Command {} Fail Expected {} Actual {}'", ".", "format", "(", "cmd", ",", "self", ".", "reply_ok", ",", "resp", ")", ")", "self", ".", "logger", ".", "debug", "(", "\"SendQueryVerify(%s) Succeed\"", ",", "cmd", ")" ]
Send command without return value, wait for completion, verify success. :param cmd: command to send
[ "Send", "command", "without", "return", "value", "wait", "for", "completion", "verify", "success", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/api/XenaSocket.py#L125-L138
DataMedSci/mcpartools
mcpartools/mcengine/shieldhit.py
ShieldHit.find_external_files
def find_external_files(self, run_input_dir): """ Scan all SHIELDHIT12A config files to find external files used and return them. Also change paths in config files to match convention that all resources are symlinked in job_xxxx/symlink """ beam_file, geo_file, mat_file, _ = self.input_files # check for external files in BEAM input file external_beam_files = self._parse_beam_file(beam_file, run_input_dir) if external_beam_files: logger.info("External files from BEAM file: {0}".format(external_beam_files)) else: logger.debug("No external files from BEAM file") # check for external files in MAT input file icru_numbers = self._parse_mat_file(mat_file) if icru_numbers: logger.info("External files from MAT file: {0}".format(icru_numbers)) else: logger.debug("No external files from MAT file") # if ICRU+LOADEX pairs were found - get file names for external material files icru_files = [] if icru_numbers: icru_files = self._decrypt_icru_files(icru_numbers) # check for external files in GEO input file geo_files = self._parse_geo_file(geo_file, run_input_dir) if geo_files: logger.info("External files from GEO file: {0}".format(geo_files)) else: logger.debug("No external files from GEO file") external_files = external_beam_files + icru_files + geo_files return [os.path.join(self.input_path, e) for e in external_files]
python
def find_external_files(self, run_input_dir): """ Scan all SHIELDHIT12A config files to find external files used and return them. Also change paths in config files to match convention that all resources are symlinked in job_xxxx/symlink """ beam_file, geo_file, mat_file, _ = self.input_files # check for external files in BEAM input file external_beam_files = self._parse_beam_file(beam_file, run_input_dir) if external_beam_files: logger.info("External files from BEAM file: {0}".format(external_beam_files)) else: logger.debug("No external files from BEAM file") # check for external files in MAT input file icru_numbers = self._parse_mat_file(mat_file) if icru_numbers: logger.info("External files from MAT file: {0}".format(icru_numbers)) else: logger.debug("No external files from MAT file") # if ICRU+LOADEX pairs were found - get file names for external material files icru_files = [] if icru_numbers: icru_files = self._decrypt_icru_files(icru_numbers) # check for external files in GEO input file geo_files = self._parse_geo_file(geo_file, run_input_dir) if geo_files: logger.info("External files from GEO file: {0}".format(geo_files)) else: logger.debug("No external files from GEO file") external_files = external_beam_files + icru_files + geo_files return [os.path.join(self.input_path, e) for e in external_files]
[ "def", "find_external_files", "(", "self", ",", "run_input_dir", ")", ":", "beam_file", ",", "geo_file", ",", "mat_file", ",", "_", "=", "self", ".", "input_files", "# check for external files in BEAM input file", "external_beam_files", "=", "self", ".", "_parse_beam_file", "(", "beam_file", ",", "run_input_dir", ")", "if", "external_beam_files", ":", "logger", ".", "info", "(", "\"External files from BEAM file: {0}\"", ".", "format", "(", "external_beam_files", ")", ")", "else", ":", "logger", ".", "debug", "(", "\"No external files from BEAM file\"", ")", "# check for external files in MAT input file", "icru_numbers", "=", "self", ".", "_parse_mat_file", "(", "mat_file", ")", "if", "icru_numbers", ":", "logger", ".", "info", "(", "\"External files from MAT file: {0}\"", ".", "format", "(", "icru_numbers", ")", ")", "else", ":", "logger", ".", "debug", "(", "\"No external files from MAT file\"", ")", "# if ICRU+LOADEX pairs were found - get file names for external material files", "icru_files", "=", "[", "]", "if", "icru_numbers", ":", "icru_files", "=", "self", ".", "_decrypt_icru_files", "(", "icru_numbers", ")", "# check for external files in GEO input file", "geo_files", "=", "self", ".", "_parse_geo_file", "(", "geo_file", ",", "run_input_dir", ")", "if", "geo_files", ":", "logger", ".", "info", "(", "\"External files from GEO file: {0}\"", ".", "format", "(", "geo_files", ")", ")", "else", ":", "logger", ".", "debug", "(", "\"No external files from GEO file\"", ")", "external_files", "=", "external_beam_files", "+", "icru_files", "+", "geo_files", "return", "[", "os", ".", "path", ".", "join", "(", "self", ".", "input_path", ",", "e", ")", "for", "e", "in", "external_files", "]" ]
Scan all SHIELDHIT12A config files to find external files used and return them. Also change paths in config files to match convention that all resources are symlinked in job_xxxx/symlink
[ "Scan", "all", "SHIELDHIT12A", "config", "files", "to", "find", "external", "files", "used", "and", "return", "them", ".", "Also", "change", "paths", "in", "config", "files", "to", "match", "convention", "that", "all", "resources", "are", "symlinked", "in", "job_xxxx", "/", "symlink" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/mcpartools/mcengine/shieldhit.py#L75-L109
DataMedSci/mcpartools
mcpartools/mcengine/shieldhit.py
ShieldHit._parse_beam_file
def _parse_beam_file(self, file_path, run_input_dir): """Scan SH12A BEAM file for references to external files and return them""" external_files = [] paths_to_replace = [] with open(file_path, 'r') as beam_f: for line in beam_f.readlines(): split_line = line.split() # line length checking to prevent IndexError if len(split_line) > 2 and split_line[0] == "USEBMOD": logger.debug("Found reference to external file in BEAM file: {0} {1}".format( split_line[0], split_line[2])) external_files.append(split_line[2]) paths_to_replace.append(split_line[2]) elif len(split_line) > 1 and split_line[0] == "USECBEAM": logger.debug("Found reference to external file in BEAM file: {0} {1}".format( split_line[0], split_line[1])) external_files.append(split_line[1]) paths_to_replace.append(split_line[1]) if paths_to_replace: run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1]) logger.debug("Calling rewrite_paths method on file: {0}".format(run_dir_config_file)) self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace) return external_files
python
def _parse_beam_file(self, file_path, run_input_dir): """Scan SH12A BEAM file for references to external files and return them""" external_files = [] paths_to_replace = [] with open(file_path, 'r') as beam_f: for line in beam_f.readlines(): split_line = line.split() # line length checking to prevent IndexError if len(split_line) > 2 and split_line[0] == "USEBMOD": logger.debug("Found reference to external file in BEAM file: {0} {1}".format( split_line[0], split_line[2])) external_files.append(split_line[2]) paths_to_replace.append(split_line[2]) elif len(split_line) > 1 and split_line[0] == "USECBEAM": logger.debug("Found reference to external file in BEAM file: {0} {1}".format( split_line[0], split_line[1])) external_files.append(split_line[1]) paths_to_replace.append(split_line[1]) if paths_to_replace: run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1]) logger.debug("Calling rewrite_paths method on file: {0}".format(run_dir_config_file)) self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace) return external_files
[ "def", "_parse_beam_file", "(", "self", ",", "file_path", ",", "run_input_dir", ")", ":", "external_files", "=", "[", "]", "paths_to_replace", "=", "[", "]", "with", "open", "(", "file_path", ",", "'r'", ")", "as", "beam_f", ":", "for", "line", "in", "beam_f", ".", "readlines", "(", ")", ":", "split_line", "=", "line", ".", "split", "(", ")", "# line length checking to prevent IndexError", "if", "len", "(", "split_line", ")", ">", "2", "and", "split_line", "[", "0", "]", "==", "\"USEBMOD\"", ":", "logger", ".", "debug", "(", "\"Found reference to external file in BEAM file: {0} {1}\"", ".", "format", "(", "split_line", "[", "0", "]", ",", "split_line", "[", "2", "]", ")", ")", "external_files", ".", "append", "(", "split_line", "[", "2", "]", ")", "paths_to_replace", ".", "append", "(", "split_line", "[", "2", "]", ")", "elif", "len", "(", "split_line", ")", ">", "1", "and", "split_line", "[", "0", "]", "==", "\"USECBEAM\"", ":", "logger", ".", "debug", "(", "\"Found reference to external file in BEAM file: {0} {1}\"", ".", "format", "(", "split_line", "[", "0", "]", ",", "split_line", "[", "1", "]", ")", ")", "external_files", ".", "append", "(", "split_line", "[", "1", "]", ")", "paths_to_replace", ".", "append", "(", "split_line", "[", "1", "]", ")", "if", "paths_to_replace", ":", "run_dir_config_file", "=", "os", ".", "path", ".", "join", "(", "run_input_dir", ",", "os", ".", "path", ".", "split", "(", "file_path", ")", "[", "-", "1", "]", ")", "logger", ".", "debug", "(", "\"Calling rewrite_paths method on file: {0}\"", ".", "format", "(", "run_dir_config_file", ")", ")", "self", ".", "_rewrite_paths_in_file", "(", "run_dir_config_file", ",", "paths_to_replace", ")", "return", "external_files" ]
Scan SH12A BEAM file for references to external files and return them
[ "Scan", "SH12A", "BEAM", "file", "for", "references", "to", "external", "files", "and", "return", "them" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/mcpartools/mcengine/shieldhit.py#L111-L133
DataMedSci/mcpartools
mcpartools/mcengine/shieldhit.py
ShieldHit._parse_geo_file
def _parse_geo_file(self, file_path, run_input_dir): """Scan SH12A GEO file for references to external files (like voxelised geometry) and return them""" external_files = [] paths_to_replace = [] with open(file_path, 'r') as geo_f: for line in geo_f.readlines(): split_line = line.split() if len(split_line) > 0 and not line.startswith("*"): base_path = os.path.join(self.input_path, split_line[0]) if os.path.isfile(base_path + '.hed'): logger.debug("Found ctx + hed files: {0}".format(base_path)) external_files.append(base_path + '.hed') # try to find ctx file if os.path.isfile(base_path + '.ctx'): external_files.append(base_path + '.ctx') elif os.path.isfile(base_path + '.ctx.gz'): external_files.append(base_path + '.ctx.gz') # replace path to match symlink location paths_to_replace.append(split_line[0]) if paths_to_replace: run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1]) logger.debug("Calling rewrite_paths method on file: {0}".format(run_dir_config_file)) self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace) return external_files
python
def _parse_geo_file(self, file_path, run_input_dir): """Scan SH12A GEO file for references to external files (like voxelised geometry) and return them""" external_files = [] paths_to_replace = [] with open(file_path, 'r') as geo_f: for line in geo_f.readlines(): split_line = line.split() if len(split_line) > 0 and not line.startswith("*"): base_path = os.path.join(self.input_path, split_line[0]) if os.path.isfile(base_path + '.hed'): logger.debug("Found ctx + hed files: {0}".format(base_path)) external_files.append(base_path + '.hed') # try to find ctx file if os.path.isfile(base_path + '.ctx'): external_files.append(base_path + '.ctx') elif os.path.isfile(base_path + '.ctx.gz'): external_files.append(base_path + '.ctx.gz') # replace path to match symlink location paths_to_replace.append(split_line[0]) if paths_to_replace: run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1]) logger.debug("Calling rewrite_paths method on file: {0}".format(run_dir_config_file)) self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace) return external_files
[ "def", "_parse_geo_file", "(", "self", ",", "file_path", ",", "run_input_dir", ")", ":", "external_files", "=", "[", "]", "paths_to_replace", "=", "[", "]", "with", "open", "(", "file_path", ",", "'r'", ")", "as", "geo_f", ":", "for", "line", "in", "geo_f", ".", "readlines", "(", ")", ":", "split_line", "=", "line", ".", "split", "(", ")", "if", "len", "(", "split_line", ")", ">", "0", "and", "not", "line", ".", "startswith", "(", "\"*\"", ")", ":", "base_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "input_path", ",", "split_line", "[", "0", "]", ")", "if", "os", ".", "path", ".", "isfile", "(", "base_path", "+", "'.hed'", ")", ":", "logger", ".", "debug", "(", "\"Found ctx + hed files: {0}\"", ".", "format", "(", "base_path", ")", ")", "external_files", ".", "append", "(", "base_path", "+", "'.hed'", ")", "# try to find ctx file", "if", "os", ".", "path", ".", "isfile", "(", "base_path", "+", "'.ctx'", ")", ":", "external_files", ".", "append", "(", "base_path", "+", "'.ctx'", ")", "elif", "os", ".", "path", ".", "isfile", "(", "base_path", "+", "'.ctx.gz'", ")", ":", "external_files", ".", "append", "(", "base_path", "+", "'.ctx.gz'", ")", "# replace path to match symlink location", "paths_to_replace", ".", "append", "(", "split_line", "[", "0", "]", ")", "if", "paths_to_replace", ":", "run_dir_config_file", "=", "os", ".", "path", ".", "join", "(", "run_input_dir", ",", "os", ".", "path", ".", "split", "(", "file_path", ")", "[", "-", "1", "]", ")", "logger", ".", "debug", "(", "\"Calling rewrite_paths method on file: {0}\"", ".", "format", "(", "run_dir_config_file", ")", ")", "self", ".", "_rewrite_paths_in_file", "(", "run_dir_config_file", ",", "paths_to_replace", ")", "return", "external_files" ]
Scan SH12A GEO file for references to external files (like voxelised geometry) and return them
[ "Scan", "SH12A", "GEO", "file", "for", "references", "to", "external", "files", "(", "like", "voxelised", "geometry", ")", "and", "return", "them" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/mcpartools/mcengine/shieldhit.py#L135-L158
DataMedSci/mcpartools
mcpartools/mcengine/shieldhit.py
ShieldHit._parse_mat_file
def _parse_mat_file(self, file_path): """Scan SH12A MAT file for ICRU+LOADEX pairs and return found ICRU numbers""" mat_file_sections = self._extract_mat_sections(file_path) return self._analyse_mat_sections(mat_file_sections)
python
def _parse_mat_file(self, file_path): """Scan SH12A MAT file for ICRU+LOADEX pairs and return found ICRU numbers""" mat_file_sections = self._extract_mat_sections(file_path) return self._analyse_mat_sections(mat_file_sections)
[ "def", "_parse_mat_file", "(", "self", ",", "file_path", ")", ":", "mat_file_sections", "=", "self", ".", "_extract_mat_sections", "(", "file_path", ")", "return", "self", ".", "_analyse_mat_sections", "(", "mat_file_sections", ")" ]
Scan SH12A MAT file for ICRU+LOADEX pairs and return found ICRU numbers
[ "Scan", "SH12A", "MAT", "file", "for", "ICRU", "+", "LOADEX", "pairs", "and", "return", "found", "ICRU", "numbers" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/mcpartools/mcengine/shieldhit.py#L160-L163
DataMedSci/mcpartools
mcpartools/mcengine/shieldhit.py
ShieldHit._analyse_mat_sections
def _analyse_mat_sections(sections): """ Cases: - ICRU flag present, LOADDEDX flag missing -> data loaded from some data hardcoded in SH12A binary, no need to load external files - ICRU flag present, LOADDEDX flag present -> data loaded from external files. ICRU number read from ICRU flag, any number following LOADDEDX flag is ignored. - ICRU flag missing, LOADDEDX flag present -> data loaded from external files. ICRU number read from LOADDEDX - ICRU flag missing, LOADDEDX flag missing -> nothing happens """ icru_numbers = [] for section in sections: load_present = False load_value = False icru_value = False for e in section: split_line = e.split() if "LOADDEDX" in e: load_present = True if len(split_line) > 1: load_value = split_line[1] if "!" not in split_line[1] else False # ignore ! comments elif "ICRU" in e and len(split_line) > 1: icru_value = split_line[1] if "!" not in split_line[1] else False # ignore ! comments if load_present: # LOADDEDX is present, so external file is required if icru_value: # if ICRU value was given icru_numbers.append(icru_value) elif load_value: # if only LOADDEDX with values was present in section icru_numbers.append(load_value) return icru_numbers
python
def _analyse_mat_sections(sections): """ Cases: - ICRU flag present, LOADDEDX flag missing -> data loaded from some data hardcoded in SH12A binary, no need to load external files - ICRU flag present, LOADDEDX flag present -> data loaded from external files. ICRU number read from ICRU flag, any number following LOADDEDX flag is ignored. - ICRU flag missing, LOADDEDX flag present -> data loaded from external files. ICRU number read from LOADDEDX - ICRU flag missing, LOADDEDX flag missing -> nothing happens """ icru_numbers = [] for section in sections: load_present = False load_value = False icru_value = False for e in section: split_line = e.split() if "LOADDEDX" in e: load_present = True if len(split_line) > 1: load_value = split_line[1] if "!" not in split_line[1] else False # ignore ! comments elif "ICRU" in e and len(split_line) > 1: icru_value = split_line[1] if "!" not in split_line[1] else False # ignore ! comments if load_present: # LOADDEDX is present, so external file is required if icru_value: # if ICRU value was given icru_numbers.append(icru_value) elif load_value: # if only LOADDEDX with values was present in section icru_numbers.append(load_value) return icru_numbers
[ "def", "_analyse_mat_sections", "(", "sections", ")", ":", "icru_numbers", "=", "[", "]", "for", "section", "in", "sections", ":", "load_present", "=", "False", "load_value", "=", "False", "icru_value", "=", "False", "for", "e", "in", "section", ":", "split_line", "=", "e", ".", "split", "(", ")", "if", "\"LOADDEDX\"", "in", "e", ":", "load_present", "=", "True", "if", "len", "(", "split_line", ")", ">", "1", ":", "load_value", "=", "split_line", "[", "1", "]", "if", "\"!\"", "not", "in", "split_line", "[", "1", "]", "else", "False", "# ignore ! comments", "elif", "\"ICRU\"", "in", "e", "and", "len", "(", "split_line", ")", ">", "1", ":", "icru_value", "=", "split_line", "[", "1", "]", "if", "\"!\"", "not", "in", "split_line", "[", "1", "]", "else", "False", "# ignore ! comments", "if", "load_present", ":", "# LOADDEDX is present, so external file is required", "if", "icru_value", ":", "# if ICRU value was given", "icru_numbers", ".", "append", "(", "icru_value", ")", "elif", "load_value", ":", "# if only LOADDEDX with values was present in section", "icru_numbers", ".", "append", "(", "load_value", ")", "return", "icru_numbers" ]
Cases: - ICRU flag present, LOADDEDX flag missing -> data loaded from some data hardcoded in SH12A binary, no need to load external files - ICRU flag present, LOADDEDX flag present -> data loaded from external files. ICRU number read from ICRU flag, any number following LOADDEDX flag is ignored. - ICRU flag missing, LOADDEDX flag present -> data loaded from external files. ICRU number read from LOADDEDX - ICRU flag missing, LOADDEDX flag missing -> nothing happens
[ "Cases", ":", "-", "ICRU", "flag", "present", "LOADDEDX", "flag", "missing", "-", ">", "data", "loaded", "from", "some", "data", "hardcoded", "in", "SH12A", "binary", "no", "need", "to", "load", "external", "files", "-", "ICRU", "flag", "present", "LOADDEDX", "flag", "present", "-", ">", "data", "loaded", "from", "external", "files", ".", "ICRU", "number", "read", "from", "ICRU", "flag", "any", "number", "following", "LOADDEDX", "flag", "is", "ignored", ".", "-", "ICRU", "flag", "missing", "LOADDEDX", "flag", "present", "-", ">", "data", "loaded", "from", "external", "files", ".", "ICRU", "number", "read", "from", "LOADDEDX", "-", "ICRU", "flag", "missing", "LOADDEDX", "flag", "missing", "-", ">", "nothing", "happens" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/mcpartools/mcengine/shieldhit.py#L182-L210
DataMedSci/mcpartools
mcpartools/mcengine/shieldhit.py
ShieldHit._decrypt_icru_files
def _decrypt_icru_files(numbers): """Find matching file names for given ICRU numbers""" import json icru_file = resource_string(__name__, os.path.join('data', 'SH12A_ICRU_table.json')) ref_dict = json.loads(icru_file.decode('ascii')) try: return [ref_dict[e] for e in numbers] except KeyError as er: logger.error("There is no ICRU file for id: {0}".format(er)) raise
python
def _decrypt_icru_files(numbers): """Find matching file names for given ICRU numbers""" import json icru_file = resource_string(__name__, os.path.join('data', 'SH12A_ICRU_table.json')) ref_dict = json.loads(icru_file.decode('ascii')) try: return [ref_dict[e] for e in numbers] except KeyError as er: logger.error("There is no ICRU file for id: {0}".format(er)) raise
[ "def", "_decrypt_icru_files", "(", "numbers", ")", ":", "import", "json", "icru_file", "=", "resource_string", "(", "__name__", ",", "os", ".", "path", ".", "join", "(", "'data'", ",", "'SH12A_ICRU_table.json'", ")", ")", "ref_dict", "=", "json", ".", "loads", "(", "icru_file", ".", "decode", "(", "'ascii'", ")", ")", "try", ":", "return", "[", "ref_dict", "[", "e", "]", "for", "e", "in", "numbers", "]", "except", "KeyError", "as", "er", ":", "logger", ".", "error", "(", "\"There is no ICRU file for id: {0}\"", ".", "format", "(", "er", ")", ")", "raise" ]
Find matching file names for given ICRU numbers
[ "Find", "matching", "file", "names", "for", "given", "ICRU", "numbers" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/mcpartools/mcengine/shieldhit.py#L213-L222
DataMedSci/mcpartools
mcpartools/mcengine/shieldhit.py
ShieldHit._rewrite_paths_in_file
def _rewrite_paths_in_file(config_file, paths_to_replace): """ Rewrite paths in config files to match convention job_xxxx/symlink Requires path to run_xxxx/input/config_file and a list of paths_to_replace """ lines = [] # make a copy of config import shutil shutil.copyfile(config_file, str(config_file + '_original')) with open(config_file) as infile: for line in infile: for old_path in paths_to_replace: if old_path in line: new_path = os.path.split(old_path)[-1] line = line.replace(old_path, new_path) logger.debug("Changed path {0} ---> {1} in file {2}".format(old_path, new_path, config_file)) lines.append(line) with open(config_file, 'w') as outfile: for line in lines: outfile.write(line)
python
def _rewrite_paths_in_file(config_file, paths_to_replace): """ Rewrite paths in config files to match convention job_xxxx/symlink Requires path to run_xxxx/input/config_file and a list of paths_to_replace """ lines = [] # make a copy of config import shutil shutil.copyfile(config_file, str(config_file + '_original')) with open(config_file) as infile: for line in infile: for old_path in paths_to_replace: if old_path in line: new_path = os.path.split(old_path)[-1] line = line.replace(old_path, new_path) logger.debug("Changed path {0} ---> {1} in file {2}".format(old_path, new_path, config_file)) lines.append(line) with open(config_file, 'w') as outfile: for line in lines: outfile.write(line)
[ "def", "_rewrite_paths_in_file", "(", "config_file", ",", "paths_to_replace", ")", ":", "lines", "=", "[", "]", "# make a copy of config", "import", "shutil", "shutil", ".", "copyfile", "(", "config_file", ",", "str", "(", "config_file", "+", "'_original'", ")", ")", "with", "open", "(", "config_file", ")", "as", "infile", ":", "for", "line", "in", "infile", ":", "for", "old_path", "in", "paths_to_replace", ":", "if", "old_path", "in", "line", ":", "new_path", "=", "os", ".", "path", ".", "split", "(", "old_path", ")", "[", "-", "1", "]", "line", "=", "line", ".", "replace", "(", "old_path", ",", "new_path", ")", "logger", ".", "debug", "(", "\"Changed path {0} ---> {1} in file {2}\"", ".", "format", "(", "old_path", ",", "new_path", ",", "config_file", ")", ")", "lines", ".", "append", "(", "line", ")", "with", "open", "(", "config_file", ",", "'w'", ")", "as", "outfile", ":", "for", "line", "in", "lines", ":", "outfile", ".", "write", "(", "line", ")" ]
Rewrite paths in config files to match convention job_xxxx/symlink Requires path to run_xxxx/input/config_file and a list of paths_to_replace
[ "Rewrite", "paths", "in", "config", "files", "to", "match", "convention", "job_xxxx", "/", "symlink", "Requires", "path", "to", "run_xxxx", "/", "input", "/", "config_file", "and", "a", "list", "of", "paths_to_replace" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/mcpartools/mcengine/shieldhit.py#L225-L244
Karaage-Cluster/python-tldap
tldap/django/helpers.py
_check_exists
def _check_exists(database: Database, table: LdapObjectClass, key: str, value: str): """ Check if a given LDAP object exists. """ try: get_one(table, Q(**{key: value}), database=database) return True except ObjectDoesNotExist: return False
python
def _check_exists(database: Database, table: LdapObjectClass, key: str, value: str): """ Check if a given LDAP object exists. """ try: get_one(table, Q(**{key: value}), database=database) return True except ObjectDoesNotExist: return False
[ "def", "_check_exists", "(", "database", ":", "Database", ",", "table", ":", "LdapObjectClass", ",", "key", ":", "str", ",", "value", ":", "str", ")", ":", "try", ":", "get_one", "(", "table", ",", "Q", "(", "*", "*", "{", "key", ":", "value", "}", ")", ",", "database", "=", "database", ")", "return", "True", "except", "ObjectDoesNotExist", ":", "return", "False" ]
Check if a given LDAP object exists.
[ "Check", "if", "a", "given", "LDAP", "object", "exists", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/django/helpers.py#L26-L32
Karaage-Cluster/python-tldap
tldap/django/helpers.py
save_account
def save_account(changes: Changeset, table: LdapObjectClass, database: Database) -> Changeset: """ Modify a changes to add an automatically generated uidNumber. """ d = {} settings = database.settings uid_number = changes.get_value_as_single('uidNumber') if uid_number is None: scheme = settings['NUMBER_SCHEME'] first = settings.get('UID_FIRST', 10000) d['uidNumber'] = Counters.get_and_increment( scheme, "uidNumber", first, lambda n: not _check_exists(database, table, 'uidNumber', n) ) changes = changes.merge(d) return changes
python
def save_account(changes: Changeset, table: LdapObjectClass, database: Database) -> Changeset: """ Modify a changes to add an automatically generated uidNumber. """ d = {} settings = database.settings uid_number = changes.get_value_as_single('uidNumber') if uid_number is None: scheme = settings['NUMBER_SCHEME'] first = settings.get('UID_FIRST', 10000) d['uidNumber'] = Counters.get_and_increment( scheme, "uidNumber", first, lambda n: not _check_exists(database, table, 'uidNumber', n) ) changes = changes.merge(d) return changes
[ "def", "save_account", "(", "changes", ":", "Changeset", ",", "table", ":", "LdapObjectClass", ",", "database", ":", "Database", ")", "->", "Changeset", ":", "d", "=", "{", "}", "settings", "=", "database", ".", "settings", "uid_number", "=", "changes", ".", "get_value_as_single", "(", "'uidNumber'", ")", "if", "uid_number", "is", "None", ":", "scheme", "=", "settings", "[", "'NUMBER_SCHEME'", "]", "first", "=", "settings", ".", "get", "(", "'UID_FIRST'", ",", "10000", ")", "d", "[", "'uidNumber'", "]", "=", "Counters", ".", "get_and_increment", "(", "scheme", ",", "\"uidNumber\"", ",", "first", ",", "lambda", "n", ":", "not", "_check_exists", "(", "database", ",", "table", ",", "'uidNumber'", ",", "n", ")", ")", "changes", "=", "changes", ".", "merge", "(", "d", ")", "return", "changes" ]
Modify a changes to add an automatically generated uidNumber.
[ "Modify", "a", "changes", "to", "add", "an", "automatically", "generated", "uidNumber", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/django/helpers.py#L35-L50
aroberge/experimental
experimental/transformers/switch_statement.py
transform_source
def transform_source(text): '''Replaces instances of switch expression: by for __case in _Switch(n): and replaces case expression: by if __case(expression): and default: by if __case(): ''' toks = tokenize.generate_tokens(StringIO(text).readline) result = [] replacing_keyword = False for toktype, tokvalue, _, _, _ in toks: if toktype == tokenize.NAME and tokvalue == 'switch': result.extend([ (tokenize.NAME, 'for'), (tokenize.NAME, '__case'), (tokenize.NAME, 'in'), (tokenize.NAME, '_Switch'), (tokenize.OP, '(') ]) replacing_keyword = True elif toktype == tokenize.NAME and (tokvalue == 'case' or tokvalue == 'default'): result.extend([ (tokenize.NAME, 'if'), (tokenize.NAME, '__case'), (tokenize.OP, '(') ]) replacing_keyword = True elif replacing_keyword and tokvalue == ':': result.extend([ (tokenize.OP, ')'), (tokenize.OP, ':') ]) replacing_keyword = False else: result.append((toktype, tokvalue)) return tokenize.untokenize(result)
python
def transform_source(text): '''Replaces instances of switch expression: by for __case in _Switch(n): and replaces case expression: by if __case(expression): and default: by if __case(): ''' toks = tokenize.generate_tokens(StringIO(text).readline) result = [] replacing_keyword = False for toktype, tokvalue, _, _, _ in toks: if toktype == tokenize.NAME and tokvalue == 'switch': result.extend([ (tokenize.NAME, 'for'), (tokenize.NAME, '__case'), (tokenize.NAME, 'in'), (tokenize.NAME, '_Switch'), (tokenize.OP, '(') ]) replacing_keyword = True elif toktype == tokenize.NAME and (tokvalue == 'case' or tokvalue == 'default'): result.extend([ (tokenize.NAME, 'if'), (tokenize.NAME, '__case'), (tokenize.OP, '(') ]) replacing_keyword = True elif replacing_keyword and tokvalue == ':': result.extend([ (tokenize.OP, ')'), (tokenize.OP, ':') ]) replacing_keyword = False else: result.append((toktype, tokvalue)) return tokenize.untokenize(result)
[ "def", "transform_source", "(", "text", ")", ":", "toks", "=", "tokenize", ".", "generate_tokens", "(", "StringIO", "(", "text", ")", ".", "readline", ")", "result", "=", "[", "]", "replacing_keyword", "=", "False", "for", "toktype", ",", "tokvalue", ",", "_", ",", "_", ",", "_", "in", "toks", ":", "if", "toktype", "==", "tokenize", ".", "NAME", "and", "tokvalue", "==", "'switch'", ":", "result", ".", "extend", "(", "[", "(", "tokenize", ".", "NAME", ",", "'for'", ")", ",", "(", "tokenize", ".", "NAME", ",", "'__case'", ")", ",", "(", "tokenize", ".", "NAME", ",", "'in'", ")", ",", "(", "tokenize", ".", "NAME", ",", "'_Switch'", ")", ",", "(", "tokenize", ".", "OP", ",", "'('", ")", "]", ")", "replacing_keyword", "=", "True", "elif", "toktype", "==", "tokenize", ".", "NAME", "and", "(", "tokvalue", "==", "'case'", "or", "tokvalue", "==", "'default'", ")", ":", "result", ".", "extend", "(", "[", "(", "tokenize", ".", "NAME", ",", "'if'", ")", ",", "(", "tokenize", ".", "NAME", ",", "'__case'", ")", ",", "(", "tokenize", ".", "OP", ",", "'('", ")", "]", ")", "replacing_keyword", "=", "True", "elif", "replacing_keyword", "and", "tokvalue", "==", "':'", ":", "result", ".", "extend", "(", "[", "(", "tokenize", ".", "OP", ",", "')'", ")", ",", "(", "tokenize", ".", "OP", ",", "':'", ")", "]", ")", "replacing_keyword", "=", "False", "else", ":", "result", ".", "append", "(", "(", "toktype", ",", "tokvalue", ")", ")", "return", "tokenize", ".", "untokenize", "(", "result", ")" ]
Replaces instances of switch expression: by for __case in _Switch(n): and replaces case expression: by if __case(expression): and default: by if __case():
[ "Replaces", "instances", "of" ]
train
https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/transformers/switch_statement.py#L73-L125
Karaage-Cluster/python-tldap
tldap/backend/base.py
LdapBase.search
def search(self, base, scope, filterstr='(objectClass=*)', attrlist=None, limit=None) -> Generator[Tuple[str, dict], None, None]: """ Search for entries in LDAP database. """ _debug("search", base, scope, filterstr, attrlist, limit) # first results if attrlist is None: attrlist = ldap3.ALL_ATTRIBUTES elif isinstance(attrlist, set): attrlist = list(attrlist) def first_results(obj): _debug("---> searching ldap", limit) obj.search( base, filterstr, scope, attributes=attrlist, paged_size=limit) return obj.response # get the 1st result result_list = self._do_with_retry(first_results) # Loop over list of search results for result_item in result_list: # skip searchResRef for now if result_item['type'] != "searchResEntry": continue dn = result_item['dn'] attributes = result_item['raw_attributes'] # did we already retrieve this from cache? _debug("---> got ldap result", dn) _debug("---> yielding", result_item) yield (dn, attributes) # we are finished - return results, eat cake _debug("---> done") return
python
def search(self, base, scope, filterstr='(objectClass=*)', attrlist=None, limit=None) -> Generator[Tuple[str, dict], None, None]: """ Search for entries in LDAP database. """ _debug("search", base, scope, filterstr, attrlist, limit) # first results if attrlist is None: attrlist = ldap3.ALL_ATTRIBUTES elif isinstance(attrlist, set): attrlist = list(attrlist) def first_results(obj): _debug("---> searching ldap", limit) obj.search( base, filterstr, scope, attributes=attrlist, paged_size=limit) return obj.response # get the 1st result result_list = self._do_with_retry(first_results) # Loop over list of search results for result_item in result_list: # skip searchResRef for now if result_item['type'] != "searchResEntry": continue dn = result_item['dn'] attributes = result_item['raw_attributes'] # did we already retrieve this from cache? _debug("---> got ldap result", dn) _debug("---> yielding", result_item) yield (dn, attributes) # we are finished - return results, eat cake _debug("---> done") return
[ "def", "search", "(", "self", ",", "base", ",", "scope", ",", "filterstr", "=", "'(objectClass=*)'", ",", "attrlist", "=", "None", ",", "limit", "=", "None", ")", "->", "Generator", "[", "Tuple", "[", "str", ",", "dict", "]", ",", "None", ",", "None", "]", ":", "_debug", "(", "\"search\"", ",", "base", ",", "scope", ",", "filterstr", ",", "attrlist", ",", "limit", ")", "# first results", "if", "attrlist", "is", "None", ":", "attrlist", "=", "ldap3", ".", "ALL_ATTRIBUTES", "elif", "isinstance", "(", "attrlist", ",", "set", ")", ":", "attrlist", "=", "list", "(", "attrlist", ")", "def", "first_results", "(", "obj", ")", ":", "_debug", "(", "\"---> searching ldap\"", ",", "limit", ")", "obj", ".", "search", "(", "base", ",", "filterstr", ",", "scope", ",", "attributes", "=", "attrlist", ",", "paged_size", "=", "limit", ")", "return", "obj", ".", "response", "# get the 1st result", "result_list", "=", "self", ".", "_do_with_retry", "(", "first_results", ")", "# Loop over list of search results", "for", "result_item", "in", "result_list", ":", "# skip searchResRef for now", "if", "result_item", "[", "'type'", "]", "!=", "\"searchResEntry\"", ":", "continue", "dn", "=", "result_item", "[", "'dn'", "]", "attributes", "=", "result_item", "[", "'raw_attributes'", "]", "# did we already retrieve this from cache?", "_debug", "(", "\"---> got ldap result\"", ",", "dn", ")", "_debug", "(", "\"---> yielding\"", ",", "result_item", ")", "yield", "(", "dn", ",", "attributes", ")", "# we are finished - return results, eat cake", "_debug", "(", "\"---> done\"", ")", "return" ]
Search for entries in LDAP database.
[ "Search", "for", "entries", "in", "LDAP", "database", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/base.py#L155-L192
Karaage-Cluster/python-tldap
tldap/backend/base.py
LdapBase.rename
def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None: """ rename a dn in the ldap database; see ldap module. doesn't return a result if transactions enabled. """ raise NotImplementedError()
python
def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None: """ rename a dn in the ldap database; see ldap module. doesn't return a result if transactions enabled. """ raise NotImplementedError()
[ "def", "rename", "(", "self", ",", "dn", ":", "str", ",", "new_rdn", ":", "str", ",", "new_base_dn", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "None", ":", "raise", "NotImplementedError", "(", ")" ]
rename a dn in the ldap database; see ldap module. doesn't return a result if transactions enabled.
[ "rename", "a", "dn", "in", "the", "ldap", "database", ";", "see", "ldap", "module", ".", "doesn", "t", "return", "a", "result", "if", "transactions", "enabled", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/base.py#L277-L282
qubell/contrib-python-qubell-client
example-hier-petclinic.py
prepare_env
def prepare_env(org): """ Example shows how to configure environment from scratch """ # Add services key_service = org.service(type='builtin:cobalt_secure_store', name='Keystore') wf_service = org.service(type='builtin:workflow_service', name='Workflow', parameters='{}') # Add services to environment env = org.environment(name='default') env.clean() env.add_service(key_service) env.add_service(wf_service) env.add_policy( {"action": "provisionVms", "parameter": "publicKeyId", "value": key_service.regenerate()['id']}) # Add cloud provider account access = { "provider": "aws-ec2", "usedEnvironments": [], "ec2SecurityGroup": "default", "providerCopy": "aws-ec2", "name": "test-provider", "jcloudsIdentity": KEY, "jcloudsCredential": SECRET_KEY, "jcloudsRegions": "us-east-1" } prov = org.provider(access) env.add_provider(prov) return org.organizationId
python
def prepare_env(org): """ Example shows how to configure environment from scratch """ # Add services key_service = org.service(type='builtin:cobalt_secure_store', name='Keystore') wf_service = org.service(type='builtin:workflow_service', name='Workflow', parameters='{}') # Add services to environment env = org.environment(name='default') env.clean() env.add_service(key_service) env.add_service(wf_service) env.add_policy( {"action": "provisionVms", "parameter": "publicKeyId", "value": key_service.regenerate()['id']}) # Add cloud provider account access = { "provider": "aws-ec2", "usedEnvironments": [], "ec2SecurityGroup": "default", "providerCopy": "aws-ec2", "name": "test-provider", "jcloudsIdentity": KEY, "jcloudsCredential": SECRET_KEY, "jcloudsRegions": "us-east-1" } prov = org.provider(access) env.add_provider(prov) return org.organizationId
[ "def", "prepare_env", "(", "org", ")", ":", "# Add services", "key_service", "=", "org", ".", "service", "(", "type", "=", "'builtin:cobalt_secure_store'", ",", "name", "=", "'Keystore'", ")", "wf_service", "=", "org", ".", "service", "(", "type", "=", "'builtin:workflow_service'", ",", "name", "=", "'Workflow'", ",", "parameters", "=", "'{}'", ")", "# Add services to environment", "env", "=", "org", ".", "environment", "(", "name", "=", "'default'", ")", "env", ".", "clean", "(", ")", "env", ".", "add_service", "(", "key_service", ")", "env", ".", "add_service", "(", "wf_service", ")", "env", ".", "add_policy", "(", "{", "\"action\"", ":", "\"provisionVms\"", ",", "\"parameter\"", ":", "\"publicKeyId\"", ",", "\"value\"", ":", "key_service", ".", "regenerate", "(", ")", "[", "'id'", "]", "}", ")", "# Add cloud provider account", "access", "=", "{", "\"provider\"", ":", "\"aws-ec2\"", ",", "\"usedEnvironments\"", ":", "[", "]", ",", "\"ec2SecurityGroup\"", ":", "\"default\"", ",", "\"providerCopy\"", ":", "\"aws-ec2\"", ",", "\"name\"", ":", "\"test-provider\"", ",", "\"jcloudsIdentity\"", ":", "KEY", ",", "\"jcloudsCredential\"", ":", "SECRET_KEY", ",", "\"jcloudsRegions\"", ":", "\"us-east-1\"", "}", "prov", "=", "org", ".", "provider", "(", "access", ")", "env", ".", "add_provider", "(", "prov", ")", "return", "org", ".", "organizationId" ]
Example shows how to configure environment from scratch
[ "Example", "shows", "how", "to", "configure", "environment", "from", "scratch" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/example-hier-petclinic.py#L40-L70
quantmind/agile-toolkit
agiletoolkit/commands.py
start
def start(ctx, debug, version, config): """Commands for devops operations""" ctx.obj = {} ctx.DEBUG = debug if os.path.isfile(config): with open(config) as fp: agile = json.load(fp) else: agile = {} ctx.obj['agile'] = agile if version: click.echo(__version__) ctx.exit(0) if not ctx.invoked_subcommand: click.echo(ctx.get_help())
python
def start(ctx, debug, version, config): """Commands for devops operations""" ctx.obj = {} ctx.DEBUG = debug if os.path.isfile(config): with open(config) as fp: agile = json.load(fp) else: agile = {} ctx.obj['agile'] = agile if version: click.echo(__version__) ctx.exit(0) if not ctx.invoked_subcommand: click.echo(ctx.get_help())
[ "def", "start", "(", "ctx", ",", "debug", ",", "version", ",", "config", ")", ":", "ctx", ".", "obj", "=", "{", "}", "ctx", ".", "DEBUG", "=", "debug", "if", "os", ".", "path", ".", "isfile", "(", "config", ")", ":", "with", "open", "(", "config", ")", "as", "fp", ":", "agile", "=", "json", ".", "load", "(", "fp", ")", "else", ":", "agile", "=", "{", "}", "ctx", ".", "obj", "[", "'agile'", "]", "=", "agile", "if", "version", ":", "click", ".", "echo", "(", "__version__", ")", "ctx", ".", "exit", "(", "0", ")", "if", "not", "ctx", ".", "invoked_subcommand", ":", "click", ".", "echo", "(", "ctx", ".", "get_help", "(", ")", ")" ]
Commands for devops operations
[ "Commands", "for", "devops", "operations" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/commands.py#L36-L50
developersociety/django-glitter
glitter/utils.py
duplicate
def duplicate(obj, value=None, field=None, duplicate_order=None): """ Duplicate all related objects of obj setting field to value. If one of the duplicate objects has an FK to another duplicate object update that as well. Return the duplicate copy of obj. duplicate_order is a list of models which specify how the duplicate objects are saved. For complex objects this can matter. Check to save if objects are being saved correctly and if not just pass in related objects in the order that they should be saved. """ using = router.db_for_write(obj._meta.model) collector = CloneCollector(using=using) collector.collect([obj]) collector.sort() related_models = list(collector.data.keys()) data_snapshot = {} for key in collector.data.keys(): data_snapshot.update({ key: dict(zip( [item.pk for item in collector.data[key]], [item for item in collector.data[key]])) }) root_obj = None # Sometimes it's good enough just to save in reverse deletion order. if duplicate_order is None: duplicate_order = reversed(related_models) for model in duplicate_order: # Find all FKs on model that point to a related_model. fks = [] for f in model._meta.fields: if isinstance(f, ForeignKey) and f.rel.to in related_models: fks.append(f) # Replace each `sub_obj` with a duplicate. if model not in collector.data: continue sub_objects = collector.data[model] for obj in sub_objects: for fk in fks: fk_value = getattr(obj, "%s_id" % fk.name) # If this FK has been duplicated then point to the duplicate. fk_rel_to = data_snapshot[fk.rel.to] if fk_value in fk_rel_to: dupe_obj = fk_rel_to[fk_value] setattr(obj, fk.name, dupe_obj) # Duplicate the object and save it. obj.id = None if field is not None: setattr(obj, field, value) obj.save() if root_obj is None: root_obj = obj return root_obj
python
def duplicate(obj, value=None, field=None, duplicate_order=None): """ Duplicate all related objects of obj setting field to value. If one of the duplicate objects has an FK to another duplicate object update that as well. Return the duplicate copy of obj. duplicate_order is a list of models which specify how the duplicate objects are saved. For complex objects this can matter. Check to save if objects are being saved correctly and if not just pass in related objects in the order that they should be saved. """ using = router.db_for_write(obj._meta.model) collector = CloneCollector(using=using) collector.collect([obj]) collector.sort() related_models = list(collector.data.keys()) data_snapshot = {} for key in collector.data.keys(): data_snapshot.update({ key: dict(zip( [item.pk for item in collector.data[key]], [item for item in collector.data[key]])) }) root_obj = None # Sometimes it's good enough just to save in reverse deletion order. if duplicate_order is None: duplicate_order = reversed(related_models) for model in duplicate_order: # Find all FKs on model that point to a related_model. fks = [] for f in model._meta.fields: if isinstance(f, ForeignKey) and f.rel.to in related_models: fks.append(f) # Replace each `sub_obj` with a duplicate. if model not in collector.data: continue sub_objects = collector.data[model] for obj in sub_objects: for fk in fks: fk_value = getattr(obj, "%s_id" % fk.name) # If this FK has been duplicated then point to the duplicate. fk_rel_to = data_snapshot[fk.rel.to] if fk_value in fk_rel_to: dupe_obj = fk_rel_to[fk_value] setattr(obj, fk.name, dupe_obj) # Duplicate the object and save it. obj.id = None if field is not None: setattr(obj, field, value) obj.save() if root_obj is None: root_obj = obj return root_obj
[ "def", "duplicate", "(", "obj", ",", "value", "=", "None", ",", "field", "=", "None", ",", "duplicate_order", "=", "None", ")", ":", "using", "=", "router", ".", "db_for_write", "(", "obj", ".", "_meta", ".", "model", ")", "collector", "=", "CloneCollector", "(", "using", "=", "using", ")", "collector", ".", "collect", "(", "[", "obj", "]", ")", "collector", ".", "sort", "(", ")", "related_models", "=", "list", "(", "collector", ".", "data", ".", "keys", "(", ")", ")", "data_snapshot", "=", "{", "}", "for", "key", "in", "collector", ".", "data", ".", "keys", "(", ")", ":", "data_snapshot", ".", "update", "(", "{", "key", ":", "dict", "(", "zip", "(", "[", "item", ".", "pk", "for", "item", "in", "collector", ".", "data", "[", "key", "]", "]", ",", "[", "item", "for", "item", "in", "collector", ".", "data", "[", "key", "]", "]", ")", ")", "}", ")", "root_obj", "=", "None", "# Sometimes it's good enough just to save in reverse deletion order.", "if", "duplicate_order", "is", "None", ":", "duplicate_order", "=", "reversed", "(", "related_models", ")", "for", "model", "in", "duplicate_order", ":", "# Find all FKs on model that point to a related_model.", "fks", "=", "[", "]", "for", "f", "in", "model", ".", "_meta", ".", "fields", ":", "if", "isinstance", "(", "f", ",", "ForeignKey", ")", "and", "f", ".", "rel", ".", "to", "in", "related_models", ":", "fks", ".", "append", "(", "f", ")", "# Replace each `sub_obj` with a duplicate.", "if", "model", "not", "in", "collector", ".", "data", ":", "continue", "sub_objects", "=", "collector", ".", "data", "[", "model", "]", "for", "obj", "in", "sub_objects", ":", "for", "fk", "in", "fks", ":", "fk_value", "=", "getattr", "(", "obj", ",", "\"%s_id\"", "%", "fk", ".", "name", ")", "# If this FK has been duplicated then point to the duplicate.", "fk_rel_to", "=", "data_snapshot", "[", "fk", ".", "rel", ".", "to", "]", "if", "fk_value", "in", "fk_rel_to", ":", "dupe_obj", "=", "fk_rel_to", "[", "fk_value", "]", "setattr", "(", 
"obj", ",", "fk", ".", "name", ",", "dupe_obj", ")", "# Duplicate the object and save it.", "obj", ".", "id", "=", "None", "if", "field", "is", "not", "None", ":", "setattr", "(", "obj", ",", "field", ",", "value", ")", "obj", ".", "save", "(", ")", "if", "root_obj", "is", "None", ":", "root_obj", "=", "obj", "return", "root_obj" ]
Duplicate all related objects of obj setting field to value. If one of the duplicate objects has an FK to another duplicate object update that as well. Return the duplicate copy of obj. duplicate_order is a list of models which specify how the duplicate objects are saved. For complex objects this can matter. Check to save if objects are being saved correctly and if not just pass in related objects in the order that they should be saved.
[ "Duplicate", "all", "related", "objects", "of", "obj", "setting", "field", "to", "value", ".", "If", "one", "of", "the", "duplicate", "objects", "has", "an", "FK", "to", "another", "duplicate", "object", "update", "that", "as", "well", ".", "Return", "the", "duplicate", "copy", "of", "obj", ".", "duplicate_order", "is", "a", "list", "of", "models", "which", "specify", "how", "the", "duplicate", "objects", "are", "saved", ".", "For", "complex", "objects", "this", "can", "matter", ".", "Check", "to", "save", "if", "objects", "are", "being", "saved", "correctly", "and", "if", "not", "just", "pass", "in", "related", "objects", "in", "the", "order", "that", "they", "should", "be", "saved", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/utils.py#L42-L98
davidblaisonneau-orange/foreman
foreman/subItemPuppetClasses.py
SubItemPuppetClasses.getPayloadStruct
def getPayloadStruct(self, attributes, objType): """ Function getPayloadStruct Get the payload structure to do a creation or a modification @param attribute: The data @param objType: SubItem type (e.g: hostgroup for hostgroup_class) @return RETURN: the payload """ payload = {self.payloadObj: attributes, objType + "_class": {self.payloadObj: attributes}} return payload
python
def getPayloadStruct(self, attributes, objType): """ Function getPayloadStruct Get the payload structure to do a creation or a modification @param attribute: The data @param objType: SubItem type (e.g: hostgroup for hostgroup_class) @return RETURN: the payload """ payload = {self.payloadObj: attributes, objType + "_class": {self.payloadObj: attributes}} return payload
[ "def", "getPayloadStruct", "(", "self", ",", "attributes", ",", "objType", ")", ":", "payload", "=", "{", "self", ".", "payloadObj", ":", "attributes", ",", "objType", "+", "\"_class\"", ":", "{", "self", ".", "payloadObj", ":", "attributes", "}", "}", "return", "payload" ]
Function getPayloadStruct Get the payload structure to do a creation or a modification @param attribute: The data @param objType: SubItem type (e.g: hostgroup for hostgroup_class) @return RETURN: the payload
[ "Function", "getPayloadStruct", "Get", "the", "payload", "structure", "to", "do", "a", "creation", "or", "a", "modification" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/subItemPuppetClasses.py#L34-L45
developersociety/django-glitter
glitter/blocks/video/validators.py
validate_url
def validate_url(value): """ Validate url. """ if not re.match(VIMEO_URL_RE, value) and not re.match(YOUTUBE_URL_RE, value): raise ValidationError('Invalid URL - only Youtube, Vimeo can be used.')
python
def validate_url(value): """ Validate url. """ if not re.match(VIMEO_URL_RE, value) and not re.match(YOUTUBE_URL_RE, value): raise ValidationError('Invalid URL - only Youtube, Vimeo can be used.')
[ "def", "validate_url", "(", "value", ")", ":", "if", "not", "re", ".", "match", "(", "VIMEO_URL_RE", ",", "value", ")", "and", "not", "re", ".", "match", "(", "YOUTUBE_URL_RE", ",", "value", ")", ":", "raise", "ValidationError", "(", "'Invalid URL - only Youtube, Vimeo can be used.'", ")" ]
Validate url.
[ "Validate", "url", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/blocks/video/validators.py#L45-L48
Karaage-Cluster/python-tldap
tldap/transaction.py
enter_transaction_management
def enter_transaction_management(using=None): """ Enters transaction management for a running thread. It must be balanced with the appropriate leave_transaction_management call, since the actual state is managed as a stack. The state and dirty flag are carried over from the surrounding block or from the settings, if there is no surrounding block (dirty is always false when no current block is running). """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.enter_transaction_management() return connection = tldap.backend.connections[using] connection.enter_transaction_management()
python
def enter_transaction_management(using=None): """ Enters transaction management for a running thread. It must be balanced with the appropriate leave_transaction_management call, since the actual state is managed as a stack. The state and dirty flag are carried over from the surrounding block or from the settings, if there is no surrounding block (dirty is always false when no current block is running). """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.enter_transaction_management() return connection = tldap.backend.connections[using] connection.enter_transaction_management()
[ "def", "enter_transaction_management", "(", "using", "=", "None", ")", ":", "if", "using", "is", "None", ":", "for", "using", "in", "tldap", ".", "backend", ".", "connections", ":", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "connection", ".", "enter_transaction_management", "(", ")", "return", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "connection", ".", "enter_transaction_management", "(", ")" ]
Enters transaction management for a running thread. It must be balanced with the appropriate leave_transaction_management call, since the actual state is managed as a stack. The state and dirty flag are carried over from the surrounding block or from the settings, if there is no surrounding block (dirty is always false when no current block is running).
[ "Enters", "transaction", "management", "for", "a", "running", "thread", ".", "It", "must", "be", "balanced", "with", "the", "appropriate", "leave_transaction_management", "call", "since", "the", "actual", "state", "is", "managed", "as", "a", "stack", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/transaction.py#L45-L61
Karaage-Cluster/python-tldap
tldap/transaction.py
leave_transaction_management
def leave_transaction_management(using=None): """ Leaves transaction management for a running thread. A dirty flag is carried over to the surrounding block, as a commit will commit all changes, even those from outside. (Commits are on connection level.) """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.leave_transaction_management() return connection = tldap.backend.connections[using] connection.leave_transaction_management()
python
def leave_transaction_management(using=None): """ Leaves transaction management for a running thread. A dirty flag is carried over to the surrounding block, as a commit will commit all changes, even those from outside. (Commits are on connection level.) """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.leave_transaction_management() return connection = tldap.backend.connections[using] connection.leave_transaction_management()
[ "def", "leave_transaction_management", "(", "using", "=", "None", ")", ":", "if", "using", "is", "None", ":", "for", "using", "in", "tldap", ".", "backend", ".", "connections", ":", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "connection", ".", "leave_transaction_management", "(", ")", "return", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "connection", ".", "leave_transaction_management", "(", ")" ]
Leaves transaction management for a running thread. A dirty flag is carried over to the surrounding block, as a commit will commit all changes, even those from outside. (Commits are on connection level.)
[ "Leaves", "transaction", "management", "for", "a", "running", "thread", ".", "A", "dirty", "flag", "is", "carried", "over", "to", "the", "surrounding", "block", "as", "a", "commit", "will", "commit", "all", "changes", "even", "those", "from", "outside", ".", "(", "Commits", "are", "on", "connection", "level", ".", ")" ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/transaction.py#L64-L76
Karaage-Cluster/python-tldap
tldap/transaction.py
is_dirty
def is_dirty(using=None): """ Returns True if the current transaction requires a commit for changes to happen. """ if using is None: dirty = False for using in tldap.backend.connections: connection = tldap.backend.connections[using] if connection.is_dirty(): dirty = True return dirty connection = tldap.backend.connections[using] return connection.is_dirty()
python
def is_dirty(using=None): """ Returns True if the current transaction requires a commit for changes to happen. """ if using is None: dirty = False for using in tldap.backend.connections: connection = tldap.backend.connections[using] if connection.is_dirty(): dirty = True return dirty connection = tldap.backend.connections[using] return connection.is_dirty()
[ "def", "is_dirty", "(", "using", "=", "None", ")", ":", "if", "using", "is", "None", ":", "dirty", "=", "False", "for", "using", "in", "tldap", ".", "backend", ".", "connections", ":", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "if", "connection", ".", "is_dirty", "(", ")", ":", "dirty", "=", "True", "return", "dirty", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "return", "connection", ".", "is_dirty", "(", ")" ]
Returns True if the current transaction requires a commit for changes to happen.
[ "Returns", "True", "if", "the", "current", "transaction", "requires", "a", "commit", "for", "changes", "to", "happen", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/transaction.py#L79-L92
Karaage-Cluster/python-tldap
tldap/transaction.py
is_managed
def is_managed(using=None): """ Checks whether the transaction manager is in manual or in auto state. """ if using is None: managed = False for using in tldap.backend.connections: connection = tldap.backend.connections[using] if connection.is_managed(): managed = True return managed connection = tldap.backend.connections[using] return connection.is_managed()
python
def is_managed(using=None): """ Checks whether the transaction manager is in manual or in auto state. """ if using is None: managed = False for using in tldap.backend.connections: connection = tldap.backend.connections[using] if connection.is_managed(): managed = True return managed connection = tldap.backend.connections[using] return connection.is_managed()
[ "def", "is_managed", "(", "using", "=", "None", ")", ":", "if", "using", "is", "None", ":", "managed", "=", "False", "for", "using", "in", "tldap", ".", "backend", ".", "connections", ":", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "if", "connection", ".", "is_managed", "(", ")", ":", "managed", "=", "True", "return", "managed", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "return", "connection", ".", "is_managed", "(", ")" ]
Checks whether the transaction manager is in manual or in auto state.
[ "Checks", "whether", "the", "transaction", "manager", "is", "in", "manual", "or", "in", "auto", "state", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/transaction.py#L95-L107
Karaage-Cluster/python-tldap
tldap/transaction.py
commit
def commit(using=None): """ Does the commit itself and resets the dirty flag. """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.commit() return connection = tldap.backend.connections[using] connection.commit()
python
def commit(using=None): """ Does the commit itself and resets the dirty flag. """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.commit() return connection = tldap.backend.connections[using] connection.commit()
[ "def", "commit", "(", "using", "=", "None", ")", ":", "if", "using", "is", "None", ":", "for", "using", "in", "tldap", ".", "backend", ".", "connections", ":", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "connection", ".", "commit", "(", ")", "return", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "connection", ".", "commit", "(", ")" ]
Does the commit itself and resets the dirty flag.
[ "Does", "the", "commit", "itself", "and", "resets", "the", "dirty", "flag", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/transaction.py#L110-L120
Karaage-Cluster/python-tldap
tldap/transaction.py
rollback
def rollback(using=None): """ This function does the rollback itself and resets the dirty flag. """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.rollback() return connection = tldap.backend.connections[using] connection.rollback()
python
def rollback(using=None): """ This function does the rollback itself and resets the dirty flag. """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.rollback() return connection = tldap.backend.connections[using] connection.rollback()
[ "def", "rollback", "(", "using", "=", "None", ")", ":", "if", "using", "is", "None", ":", "for", "using", "in", "tldap", ".", "backend", ".", "connections", ":", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "connection", ".", "rollback", "(", ")", "return", "connection", "=", "tldap", ".", "backend", ".", "connections", "[", "using", "]", "connection", ".", "rollback", "(", ")" ]
This function does the rollback itself and resets the dirty flag.
[ "This", "function", "does", "the", "rollback", "itself", "and", "resets", "the", "dirty", "flag", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/transaction.py#L123-L133
Karaage-Cluster/python-tldap
tldap/transaction.py
_transaction_func
def _transaction_func(entering, exiting, using): """ Takes 3 things, an entering function (what to do to start this block of transaction management), an exiting function (what to do to end it, on both success and failure, and using which can be: None, indiciating transaction should occur on all defined servers, or a callable, indicating that using is None and to return the function already wrapped. Returns either a Transaction objects, which is both a decorator and a context manager, or a wrapped function, if using is a callable. """ # Note that although the first argument is *called* `using`, it # may actually be a function; @autocommit and @autocommit('foo') # are both allowed forms. if callable(using): return Transaction(entering, exiting, None)(using) return Transaction(entering, exiting, using)
python
def _transaction_func(entering, exiting, using): """ Takes 3 things, an entering function (what to do to start this block of transaction management), an exiting function (what to do to end it, on both success and failure, and using which can be: None, indiciating transaction should occur on all defined servers, or a callable, indicating that using is None and to return the function already wrapped. Returns either a Transaction objects, which is both a decorator and a context manager, or a wrapped function, if using is a callable. """ # Note that although the first argument is *called* `using`, it # may actually be a function; @autocommit and @autocommit('foo') # are both allowed forms. if callable(using): return Transaction(entering, exiting, None)(using) return Transaction(entering, exiting, using)
[ "def", "_transaction_func", "(", "entering", ",", "exiting", ",", "using", ")", ":", "# Note that although the first argument is *called* `using`, it", "# may actually be a function; @autocommit and @autocommit('foo')", "# are both allowed forms.", "if", "callable", "(", "using", ")", ":", "return", "Transaction", "(", "entering", ",", "exiting", ",", "None", ")", "(", "using", ")", "return", "Transaction", "(", "entering", ",", "exiting", ",", "using", ")" ]
Takes 3 things, an entering function (what to do to start this block of transaction management), an exiting function (what to do to end it, on both success and failure, and using which can be: None, indiciating transaction should occur on all defined servers, or a callable, indicating that using is None and to return the function already wrapped. Returns either a Transaction objects, which is both a decorator and a context manager, or a wrapped function, if using is a callable.
[ "Takes", "3", "things", "an", "entering", "function", "(", "what", "to", "do", "to", "start", "this", "block", "of", "transaction", "management", ")", "an", "exiting", "function", "(", "what", "to", "do", "to", "end", "it", "on", "both", "success", "and", "failure", "and", "using", "which", "can", "be", ":", "None", "indiciating", "transaction", "should", "occur", "on", "all", "defined", "servers", "or", "a", "callable", "indicating", "that", "using", "is", "None", "and", "to", "return", "the", "function", "already", "wrapped", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/transaction.py#L179-L195
Karaage-Cluster/python-tldap
tldap/transaction.py
commit_on_success
def commit_on_success(using=None): """ This decorator activates commit on response. This way, if the view function runs successfully, a commit is made; if the viewfunc produces an exception, a rollback is made. This is one of the most common ways to do transaction control in Web apps. """ def entering(using): enter_transaction_management(using=using) def exiting(exc_value, using): try: if exc_value is not None: if is_dirty(using=using): rollback(using=using) else: commit(using=using) finally: leave_transaction_management(using=using) return _transaction_func(entering, exiting, using)
python
def commit_on_success(using=None): """ This decorator activates commit on response. This way, if the view function runs successfully, a commit is made; if the viewfunc produces an exception, a rollback is made. This is one of the most common ways to do transaction control in Web apps. """ def entering(using): enter_transaction_management(using=using) def exiting(exc_value, using): try: if exc_value is not None: if is_dirty(using=using): rollback(using=using) else: commit(using=using) finally: leave_transaction_management(using=using) return _transaction_func(entering, exiting, using)
[ "def", "commit_on_success", "(", "using", "=", "None", ")", ":", "def", "entering", "(", "using", ")", ":", "enter_transaction_management", "(", "using", "=", "using", ")", "def", "exiting", "(", "exc_value", ",", "using", ")", ":", "try", ":", "if", "exc_value", "is", "not", "None", ":", "if", "is_dirty", "(", "using", "=", "using", ")", ":", "rollback", "(", "using", "=", "using", ")", "else", ":", "commit", "(", "using", "=", "using", ")", "finally", ":", "leave_transaction_management", "(", "using", "=", "using", ")", "return", "_transaction_func", "(", "entering", ",", "exiting", ",", "using", ")" ]
This decorator activates commit on response. This way, if the view function runs successfully, a commit is made; if the viewfunc produces an exception, a rollback is made. This is one of the most common ways to do transaction control in Web apps.
[ "This", "decorator", "activates", "commit", "on", "response", ".", "This", "way", "if", "the", "view", "function", "runs", "successfully", "a", "commit", "is", "made", ";", "if", "the", "viewfunc", "produces", "an", "exception", "a", "rollback", "is", "made", ".", "This", "is", "one", "of", "the", "most", "common", "ways", "to", "do", "transaction", "control", "in", "Web", "apps", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/transaction.py#L198-L218
Karaage-Cluster/python-tldap
tldap/transaction.py
commit_manually
def commit_manually(using=None): """ Decorator that activates manual transaction control. It just disables automatic transaction control and doesn't do any commit/rollback of its own -- it's up to the user to call the commit and rollback functions themselves. """ def entering(using): enter_transaction_management(using=using) def exiting(exc_value, using): leave_transaction_management(using=using) return _transaction_func(entering, exiting, using)
python
def commit_manually(using=None): """ Decorator that activates manual transaction control. It just disables automatic transaction control and doesn't do any commit/rollback of its own -- it's up to the user to call the commit and rollback functions themselves. """ def entering(using): enter_transaction_management(using=using) def exiting(exc_value, using): leave_transaction_management(using=using) return _transaction_func(entering, exiting, using)
[ "def", "commit_manually", "(", "using", "=", "None", ")", ":", "def", "entering", "(", "using", ")", ":", "enter_transaction_management", "(", "using", "=", "using", ")", "def", "exiting", "(", "exc_value", ",", "using", ")", ":", "leave_transaction_management", "(", "using", "=", "using", ")", "return", "_transaction_func", "(", "entering", ",", "exiting", ",", "using", ")" ]
Decorator that activates manual transaction control. It just disables automatic transaction control and doesn't do any commit/rollback of its own -- it's up to the user to call the commit and rollback functions themselves.
[ "Decorator", "that", "activates", "manual", "transaction", "control", ".", "It", "just", "disables", "automatic", "transaction", "control", "and", "doesn", "t", "do", "any", "commit", "/", "rollback", "of", "its", "own", "--", "it", "s", "up", "to", "the", "user", "to", "call", "the", "commit", "and", "rollback", "functions", "themselves", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/transaction.py#L221-L234
jamescooke/flake8-aaa
src/flake8_aaa/checker.py
Checker.run
def run(self) -> Generator[Tuple[int, int, str, type], None, None]: """ Yields: tuple (line_number: int, offset: int, text: str, check: type) """ if is_test_file(self.filename): self.load() for func in self.all_funcs(): try: for error in func.check_all(): yield (error.line_number, error.offset, error.text, Checker) except ValidationError as error: yield error.to_flake8(Checker)
python
def run(self) -> Generator[Tuple[int, int, str, type], None, None]: """ Yields: tuple (line_number: int, offset: int, text: str, check: type) """ if is_test_file(self.filename): self.load() for func in self.all_funcs(): try: for error in func.check_all(): yield (error.line_number, error.offset, error.text, Checker) except ValidationError as error: yield error.to_flake8(Checker)
[ "def", "run", "(", "self", ")", "->", "Generator", "[", "Tuple", "[", "int", ",", "int", ",", "str", ",", "type", "]", ",", "None", ",", "None", "]", ":", "if", "is_test_file", "(", "self", ".", "filename", ")", ":", "self", ".", "load", "(", ")", "for", "func", "in", "self", ".", "all_funcs", "(", ")", ":", "try", ":", "for", "error", "in", "func", ".", "check_all", "(", ")", ":", "yield", "(", "error", ".", "line_number", ",", "error", ".", "offset", ",", "error", ".", "text", ",", "Checker", ")", "except", "ValidationError", "as", "error", ":", "yield", "error", ".", "to_flake8", "(", "Checker", ")" ]
Yields: tuple (line_number: int, offset: int, text: str, check: type)
[ "Yields", ":", "tuple", "(", "line_number", ":", "int", "offset", ":", "int", "text", ":", "str", "check", ":", "type", ")" ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/checker.py#L42-L54
developersociety/django-glitter
glitter/pages/middleware.py
GlitterUrlConfMiddleware.process_request
def process_request(self, request): """ Reloads glitter URL patterns if page URLs change. Avoids having to restart the server to recreate the glitter URLs being used by Django. """ global _urlconf_pages page_list = list( Page.objects.exclude(glitter_app_name='').values_list('id', 'url').order_by('id') ) with _urlconf_lock: if page_list != _urlconf_pages: glitter_urls = 'glitter.urls' if glitter_urls in sys.modules: importlib.reload(sys.modules[glitter_urls]) _urlconf_pages = page_list
python
def process_request(self, request): """ Reloads glitter URL patterns if page URLs change. Avoids having to restart the server to recreate the glitter URLs being used by Django. """ global _urlconf_pages page_list = list( Page.objects.exclude(glitter_app_name='').values_list('id', 'url').order_by('id') ) with _urlconf_lock: if page_list != _urlconf_pages: glitter_urls = 'glitter.urls' if glitter_urls in sys.modules: importlib.reload(sys.modules[glitter_urls]) _urlconf_pages = page_list
[ "def", "process_request", "(", "self", ",", "request", ")", ":", "global", "_urlconf_pages", "page_list", "=", "list", "(", "Page", ".", "objects", ".", "exclude", "(", "glitter_app_name", "=", "''", ")", ".", "values_list", "(", "'id'", ",", "'url'", ")", ".", "order_by", "(", "'id'", ")", ")", "with", "_urlconf_lock", ":", "if", "page_list", "!=", "_urlconf_pages", ":", "glitter_urls", "=", "'glitter.urls'", "if", "glitter_urls", "in", "sys", ".", "modules", ":", "importlib", ".", "reload", "(", "sys", ".", "modules", "[", "glitter_urls", "]", ")", "_urlconf_pages", "=", "page_list" ]
Reloads glitter URL patterns if page URLs change. Avoids having to restart the server to recreate the glitter URLs being used by Django.
[ "Reloads", "glitter", "URL", "patterns", "if", "page", "URLs", "change", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/pages/middleware.py#L38-L55
MatterMiners/cobald
cobald/daemon/runners/base_runner.py
BaseRunner.run
def run(self): """ Execute all current and future payloads Blocks and executes payloads until :py:meth:`stop` is called. It is an error for any orphaned payload to return or raise. """ self._logger.info('runner started: %s', self) try: with self._lock: assert not self.running.is_set() and self._stopped.is_set(), 'cannot re-run: %s' % self self.running.set() self._stopped.clear() self._run() except Exception: self._logger.exception('runner aborted: %s', self) raise else: self._logger.info('runner stopped: %s', self) finally: with self._lock: self.running.clear() self._stopped.set()
python
def run(self): """ Execute all current and future payloads Blocks and executes payloads until :py:meth:`stop` is called. It is an error for any orphaned payload to return or raise. """ self._logger.info('runner started: %s', self) try: with self._lock: assert not self.running.is_set() and self._stopped.is_set(), 'cannot re-run: %s' % self self.running.set() self._stopped.clear() self._run() except Exception: self._logger.exception('runner aborted: %s', self) raise else: self._logger.info('runner stopped: %s', self) finally: with self._lock: self.running.clear() self._stopped.set()
[ "def", "run", "(", "self", ")", ":", "self", ".", "_logger", ".", "info", "(", "'runner started: %s'", ",", "self", ")", "try", ":", "with", "self", ".", "_lock", ":", "assert", "not", "self", ".", "running", ".", "is_set", "(", ")", "and", "self", ".", "_stopped", ".", "is_set", "(", ")", ",", "'cannot re-run: %s'", "%", "self", "self", ".", "running", ".", "set", "(", ")", "self", ".", "_stopped", ".", "clear", "(", ")", "self", ".", "_run", "(", ")", "except", "Exception", ":", "self", ".", "_logger", ".", "exception", "(", "'runner aborted: %s'", ",", "self", ")", "raise", "else", ":", "self", ".", "_logger", ".", "info", "(", "'runner stopped: %s'", ",", "self", ")", "finally", ":", "with", "self", ".", "_lock", ":", "self", ".", "running", ".", "clear", "(", ")", "self", ".", "_stopped", ".", "set", "(", ")" ]
Execute all current and future payloads Blocks and executes payloads until :py:meth:`stop` is called. It is an error for any orphaned payload to return or raise.
[ "Execute", "all", "current", "and", "future", "payloads" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/base_runner.py#L45-L67
MatterMiners/cobald
cobald/daemon/runners/base_runner.py
BaseRunner.stop
def stop(self): """Stop execution of all current and future payloads""" if not self.running.wait(0.2): return self._logger.debug('runner disabled: %s', self) with self._lock: self.running.clear() self._stopped.wait()
python
def stop(self): """Stop execution of all current and future payloads""" if not self.running.wait(0.2): return self._logger.debug('runner disabled: %s', self) with self._lock: self.running.clear() self._stopped.wait()
[ "def", "stop", "(", "self", ")", ":", "if", "not", "self", ".", "running", ".", "wait", "(", "0.2", ")", ":", "return", "self", ".", "_logger", ".", "debug", "(", "'runner disabled: %s'", ",", "self", ")", "with", "self", ".", "_lock", ":", "self", ".", "running", ".", "clear", "(", ")", "self", ".", "_stopped", ".", "wait", "(", ")" ]
Stop execution of all current and future payloads
[ "Stop", "execution", "of", "all", "current", "and", "future", "payloads" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/base_runner.py#L72-L79
josiah-wolf-oberholtzer/uqbar
uqbar/strings.py
delimit_words
def delimit_words(string: str) -> Generator[str, None, None]: """ Delimit a string at word boundaries. :: >>> import uqbar.strings >>> list(uqbar.strings.delimit_words("i want to believe")) ['i', 'want', 'to', 'believe'] :: >>> list(uqbar.strings.delimit_words("S3Bucket")) ['S3', 'Bucket'] :: >>> list(uqbar.strings.delimit_words("Route53")) ['Route', '53'] """ # TODO: Reimplement this wordlike_characters = ("<", ">", "!") current_word = "" for i, character in enumerate(string): if ( not character.isalpha() and not character.isdigit() and character not in wordlike_characters ): if current_word: yield current_word current_word = "" elif not current_word: current_word += character elif character.isupper(): if current_word[-1].isupper(): current_word += character else: yield current_word current_word = character elif character.islower(): if current_word[-1].isalpha(): current_word += character else: yield current_word current_word = character elif character.isdigit(): if current_word[-1].isdigit() or current_word[-1].isupper(): current_word += character else: yield current_word current_word = character elif character in wordlike_characters: if current_word[-1] in wordlike_characters: current_word += character else: yield current_word current_word = character if current_word: yield current_word
python
def delimit_words(string: str) -> Generator[str, None, None]: """ Delimit a string at word boundaries. :: >>> import uqbar.strings >>> list(uqbar.strings.delimit_words("i want to believe")) ['i', 'want', 'to', 'believe'] :: >>> list(uqbar.strings.delimit_words("S3Bucket")) ['S3', 'Bucket'] :: >>> list(uqbar.strings.delimit_words("Route53")) ['Route', '53'] """ # TODO: Reimplement this wordlike_characters = ("<", ">", "!") current_word = "" for i, character in enumerate(string): if ( not character.isalpha() and not character.isdigit() and character not in wordlike_characters ): if current_word: yield current_word current_word = "" elif not current_word: current_word += character elif character.isupper(): if current_word[-1].isupper(): current_word += character else: yield current_word current_word = character elif character.islower(): if current_word[-1].isalpha(): current_word += character else: yield current_word current_word = character elif character.isdigit(): if current_word[-1].isdigit() or current_word[-1].isupper(): current_word += character else: yield current_word current_word = character elif character in wordlike_characters: if current_word[-1] in wordlike_characters: current_word += character else: yield current_word current_word = character if current_word: yield current_word
[ "def", "delimit_words", "(", "string", ":", "str", ")", "->", "Generator", "[", "str", ",", "None", ",", "None", "]", ":", "# TODO: Reimplement this", "wordlike_characters", "=", "(", "\"<\"", ",", "\">\"", ",", "\"!\"", ")", "current_word", "=", "\"\"", "for", "i", ",", "character", "in", "enumerate", "(", "string", ")", ":", "if", "(", "not", "character", ".", "isalpha", "(", ")", "and", "not", "character", ".", "isdigit", "(", ")", "and", "character", "not", "in", "wordlike_characters", ")", ":", "if", "current_word", ":", "yield", "current_word", "current_word", "=", "\"\"", "elif", "not", "current_word", ":", "current_word", "+=", "character", "elif", "character", ".", "isupper", "(", ")", ":", "if", "current_word", "[", "-", "1", "]", ".", "isupper", "(", ")", ":", "current_word", "+=", "character", "else", ":", "yield", "current_word", "current_word", "=", "character", "elif", "character", ".", "islower", "(", ")", ":", "if", "current_word", "[", "-", "1", "]", ".", "isalpha", "(", ")", ":", "current_word", "+=", "character", "else", ":", "yield", "current_word", "current_word", "=", "character", "elif", "character", ".", "isdigit", "(", ")", ":", "if", "current_word", "[", "-", "1", "]", ".", "isdigit", "(", ")", "or", "current_word", "[", "-", "1", "]", ".", "isupper", "(", ")", ":", "current_word", "+=", "character", "else", ":", "yield", "current_word", "current_word", "=", "character", "elif", "character", "in", "wordlike_characters", ":", "if", "current_word", "[", "-", "1", "]", "in", "wordlike_characters", ":", "current_word", "+=", "character", "else", ":", "yield", "current_word", "current_word", "=", "character", "if", "current_word", ":", "yield", "current_word" ]
Delimit a string at word boundaries. :: >>> import uqbar.strings >>> list(uqbar.strings.delimit_words("i want to believe")) ['i', 'want', 'to', 'believe'] :: >>> list(uqbar.strings.delimit_words("S3Bucket")) ['S3', 'Bucket'] :: >>> list(uqbar.strings.delimit_words("Route53")) ['Route', '53']
[ "Delimit", "a", "string", "at", "word", "boundaries", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/strings.py#L18-L78
josiah-wolf-oberholtzer/uqbar
uqbar/strings.py
normalize
def normalize(string: str) -> str: """ Normalizes whitespace. Strips leading and trailing blank lines, dedents, and removes trailing whitespace from the result. """ string = string.replace("\t", " ") lines = string.split("\n") while lines and (not lines[0] or lines[0].isspace()): lines.pop(0) while lines and (not lines[-1] or lines[-1].isspace()): lines.pop() for i, line in enumerate(lines): lines[i] = line.rstrip() string = "\n".join(lines) string = textwrap.dedent(string) return string
python
def normalize(string: str) -> str: """ Normalizes whitespace. Strips leading and trailing blank lines, dedents, and removes trailing whitespace from the result. """ string = string.replace("\t", " ") lines = string.split("\n") while lines and (not lines[0] or lines[0].isspace()): lines.pop(0) while lines and (not lines[-1] or lines[-1].isspace()): lines.pop() for i, line in enumerate(lines): lines[i] = line.rstrip() string = "\n".join(lines) string = textwrap.dedent(string) return string
[ "def", "normalize", "(", "string", ":", "str", ")", "->", "str", ":", "string", "=", "string", ".", "replace", "(", "\"\\t\"", ",", "\" \"", ")", "lines", "=", "string", ".", "split", "(", "\"\\n\"", ")", "while", "lines", "and", "(", "not", "lines", "[", "0", "]", "or", "lines", "[", "0", "]", ".", "isspace", "(", ")", ")", ":", "lines", ".", "pop", "(", "0", ")", "while", "lines", "and", "(", "not", "lines", "[", "-", "1", "]", "or", "lines", "[", "-", "1", "]", ".", "isspace", "(", ")", ")", ":", "lines", ".", "pop", "(", ")", "for", "i", ",", "line", "in", "enumerate", "(", "lines", ")", ":", "lines", "[", "i", "]", "=", "line", ".", "rstrip", "(", ")", "string", "=", "\"\\n\"", ".", "join", "(", "lines", ")", "string", "=", "textwrap", ".", "dedent", "(", "string", ")", "return", "string" ]
Normalizes whitespace. Strips leading and trailing blank lines, dedents, and removes trailing whitespace from the result.
[ "Normalizes", "whitespace", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/strings.py#L81-L98
josiah-wolf-oberholtzer/uqbar
uqbar/strings.py
to_dash_case
def to_dash_case(string: str) -> str: """ Convert a string to dash-delimited words. :: >>> import uqbar.strings >>> string = 'Tô Đặc Biệt Xe Lửa' >>> print(uqbar.strings.to_dash_case(string)) to-dac-biet-xe-lua :: >>> string = 'alpha.beta.gamma' >>> print(uqbar.strings.to_dash_case(string)) alpha-beta-gamma """ string = unidecode.unidecode(string) words = (_.lower() for _ in delimit_words(string)) string = "-".join(words) return string
python
def to_dash_case(string: str) -> str: """ Convert a string to dash-delimited words. :: >>> import uqbar.strings >>> string = 'Tô Đặc Biệt Xe Lửa' >>> print(uqbar.strings.to_dash_case(string)) to-dac-biet-xe-lua :: >>> string = 'alpha.beta.gamma' >>> print(uqbar.strings.to_dash_case(string)) alpha-beta-gamma """ string = unidecode.unidecode(string) words = (_.lower() for _ in delimit_words(string)) string = "-".join(words) return string
[ "def", "to_dash_case", "(", "string", ":", "str", ")", "->", "str", ":", "string", "=", "unidecode", ".", "unidecode", "(", "string", ")", "words", "=", "(", "_", ".", "lower", "(", ")", "for", "_", "in", "delimit_words", "(", "string", ")", ")", "string", "=", "\"-\"", ".", "join", "(", "words", ")", "return", "string" ]
Convert a string to dash-delimited words. :: >>> import uqbar.strings >>> string = 'Tô Đặc Biệt Xe Lửa' >>> print(uqbar.strings.to_dash_case(string)) to-dac-biet-xe-lua :: >>> string = 'alpha.beta.gamma' >>> print(uqbar.strings.to_dash_case(string)) alpha-beta-gamma
[ "Convert", "a", "string", "to", "dash", "-", "delimited", "words", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/strings.py#L101-L122
aroberge/experimental
experimental/transformers/utils/simple2to3.py
get_lib2to3_fixers
def get_lib2to3_fixers(): '''returns a list of all fixers found in the lib2to3 library''' fixers = [] fixer_dirname = fixer_dir.__path__[0] for name in sorted(os.listdir(fixer_dirname)): if name.startswith("fix_") and name.endswith(".py"): fixers.append("lib2to3.fixes." + name[:-3]) return fixers
python
def get_lib2to3_fixers(): '''returns a list of all fixers found in the lib2to3 library''' fixers = [] fixer_dirname = fixer_dir.__path__[0] for name in sorted(os.listdir(fixer_dirname)): if name.startswith("fix_") and name.endswith(".py"): fixers.append("lib2to3.fixes." + name[:-3]) return fixers
[ "def", "get_lib2to3_fixers", "(", ")", ":", "fixers", "=", "[", "]", "fixer_dirname", "=", "fixer_dir", ".", "__path__", "[", "0", "]", "for", "name", "in", "sorted", "(", "os", ".", "listdir", "(", "fixer_dirname", ")", ")", ":", "if", "name", ".", "startswith", "(", "\"fix_\"", ")", "and", "name", ".", "endswith", "(", "\".py\"", ")", ":", "fixers", ".", "append", "(", "\"lib2to3.fixes.\"", "+", "name", "[", ":", "-", "3", "]", ")", "return", "fixers" ]
returns a list of all fixers found in the lib2to3 library
[ "returns", "a", "list", "of", "all", "fixers", "found", "in", "the", "lib2to3", "library" ]
train
https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/transformers/utils/simple2to3.py#L10-L17
aroberge/experimental
experimental/transformers/utils/simple2to3.py
get_single_fixer
def get_single_fixer(fixname): '''return a single fixer found in the lib2to3 library''' fixer_dirname = fixer_dir.__path__[0] for name in sorted(os.listdir(fixer_dirname)): if (name.startswith("fix_") and name.endswith(".py") and fixname == name[4:-3]): return "lib2to3.fixes." + name[:-3]
python
def get_single_fixer(fixname): '''return a single fixer found in the lib2to3 library''' fixer_dirname = fixer_dir.__path__[0] for name in sorted(os.listdir(fixer_dirname)): if (name.startswith("fix_") and name.endswith(".py") and fixname == name[4:-3]): return "lib2to3.fixes." + name[:-3]
[ "def", "get_single_fixer", "(", "fixname", ")", ":", "fixer_dirname", "=", "fixer_dir", ".", "__path__", "[", "0", "]", "for", "name", "in", "sorted", "(", "os", ".", "listdir", "(", "fixer_dirname", ")", ")", ":", "if", "(", "name", ".", "startswith", "(", "\"fix_\"", ")", "and", "name", ".", "endswith", "(", "\".py\"", ")", "and", "fixname", "==", "name", "[", "4", ":", "-", "3", "]", ")", ":", "return", "\"lib2to3.fixes.\"", "+", "name", "[", ":", "-", "3", "]" ]
return a single fixer found in the lib2to3 library
[ "return", "a", "single", "fixer", "found", "in", "the", "lib2to3", "library" ]
train
https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/transformers/utils/simple2to3.py#L20-L26
Karaage-Cluster/python-tldap
tldap/fields.py
Field.to_db
def to_db(self, value): """ Returns field's single value prepared for saving into a database. """ # ensure value is valid self.validate(value) assert isinstance(value, list) value = list(value) for i, v in enumerate(value): value[i] = self.value_to_db(v) # return result assert isinstance(value, list) return value
python
def to_db(self, value): """ Returns field's single value prepared for saving into a database. """ # ensure value is valid self.validate(value) assert isinstance(value, list) value = list(value) for i, v in enumerate(value): value[i] = self.value_to_db(v) # return result assert isinstance(value, list) return value
[ "def", "to_db", "(", "self", ",", "value", ")", ":", "# ensure value is valid", "self", ".", "validate", "(", "value", ")", "assert", "isinstance", "(", "value", ",", "list", ")", "value", "=", "list", "(", "value", ")", "for", "i", ",", "v", "in", "enumerate", "(", "value", ")", ":", "value", "[", "i", "]", "=", "self", ".", "value_to_db", "(", "v", ")", "# return result", "assert", "isinstance", "(", "value", ",", "list", ")", "return", "value" ]
Returns field's single value prepared for saving into a database.
[ "Returns", "field", "s", "single", "value", "prepared", "for", "saving", "into", "a", "database", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L42-L55
Karaage-Cluster/python-tldap
tldap/fields.py
Field.to_python
def to_python(self, value): """ Converts the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ assert isinstance(value, list) # convert every value in list value = list(value) for i, v in enumerate(value): value[i] = self.value_to_python(v) # return result return value
python
def to_python(self, value): """ Converts the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ assert isinstance(value, list) # convert every value in list value = list(value) for i, v in enumerate(value): value[i] = self.value_to_python(v) # return result return value
[ "def", "to_python", "(", "self", ",", "value", ")", ":", "assert", "isinstance", "(", "value", ",", "list", ")", "# convert every value in list", "value", "=", "list", "(", "value", ")", "for", "i", ",", "v", "in", "enumerate", "(", "value", ")", ":", "value", "[", "i", "]", "=", "self", ".", "value_to_python", "(", "v", ")", "# return result", "return", "value" ]
Converts the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
[ "Converts", "the", "input", "value", "into", "the", "expected", "Python", "data", "type", "raising", "django", ".", "core", ".", "exceptions", ".", "ValidationError", "if", "the", "data", "can", "t", "be", "converted", ".", "Returns", "the", "converted", "value", ".", "Subclasses", "should", "override", "this", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L57-L71
Karaage-Cluster/python-tldap
tldap/fields.py
Field.validate
def validate(self, value): """ Validates value and throws ValidationError. Subclasses should override this to provide validation logic. """ # check object type if not isinstance(value, list): raise tldap.exceptions.ValidationError( "is not a list and max_instances is %s" % self._max_instances) # check maximum instances if (self._max_instances is not None and len(value) > self._max_instances): raise tldap.exceptions.ValidationError( "exceeds max_instances of %d" % self._max_instances) # check this required value is given if self._required: if len(value) == 0: raise tldap.exceptions.ValidationError( "is required") # validate the value for i, v in enumerate(value): self.value_validate(v)
python
def validate(self, value): """ Validates value and throws ValidationError. Subclasses should override this to provide validation logic. """ # check object type if not isinstance(value, list): raise tldap.exceptions.ValidationError( "is not a list and max_instances is %s" % self._max_instances) # check maximum instances if (self._max_instances is not None and len(value) > self._max_instances): raise tldap.exceptions.ValidationError( "exceeds max_instances of %d" % self._max_instances) # check this required value is given if self._required: if len(value) == 0: raise tldap.exceptions.ValidationError( "is required") # validate the value for i, v in enumerate(value): self.value_validate(v)
[ "def", "validate", "(", "self", ",", "value", ")", ":", "# check object type", "if", "not", "isinstance", "(", "value", ",", "list", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"is not a list and max_instances is %s\"", "%", "self", ".", "_max_instances", ")", "# check maximum instances", "if", "(", "self", ".", "_max_instances", "is", "not", "None", "and", "len", "(", "value", ")", ">", "self", ".", "_max_instances", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"exceeds max_instances of %d\"", "%", "self", ".", "_max_instances", ")", "# check this required value is given", "if", "self", ".", "_required", ":", "if", "len", "(", "value", ")", "==", "0", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"is required\"", ")", "# validate the value", "for", "i", ",", "v", "in", "enumerate", "(", "value", ")", ":", "self", ".", "value_validate", "(", "v", ")" ]
Validates value and throws ValidationError. Subclasses should override this to provide validation logic.
[ "Validates", "value", "and", "throws", "ValidationError", ".", "Subclasses", "should", "override", "this", "to", "provide", "validation", "logic", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L73-L96
Karaage-Cluster/python-tldap
tldap/fields.py
Field.clean
def clean(self, value): """ Convert the value's type and run validation. Validation errors from to_python and validate are propagated. The correct value is returned if no error is raised. """ value = self.to_python(value) self.validate(value) return value
python
def clean(self, value): """ Convert the value's type and run validation. Validation errors from to_python and validate are propagated. The correct value is returned if no error is raised. """ value = self.to_python(value) self.validate(value) return value
[ "def", "clean", "(", "self", ",", "value", ")", ":", "value", "=", "self", ".", "to_python", "(", "value", ")", "self", ".", "validate", "(", "value", ")", "return", "value" ]
Convert the value's type and run validation. Validation errors from to_python and validate are propagated. The correct value is returned if no error is raised.
[ "Convert", "the", "value", "s", "type", "and", "run", "validation", ".", "Validation", "errors", "from", "to_python", "and", "validate", "are", "propagated", ".", "The", "correct", "value", "is", "returned", "if", "no", "error", "is", "raised", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L98-L106
Karaage-Cluster/python-tldap
tldap/fields.py
CharField.value_to_db
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ if isinstance(value, six.string_types): value = value.encode("utf_8") return value
python
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ if isinstance(value, six.string_types): value = value.encode("utf_8") return value
[ "def", "value_to_db", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "value", "=", "value", ".", "encode", "(", "\"utf_8\"", ")", "return", "value" ]
Returns field's single value prepared for saving into a database.
[ "Returns", "field", "s", "single", "value", "prepared", "for", "saving", "into", "a", "database", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L195-L199
Karaage-Cluster/python-tldap
tldap/fields.py
CharField.value_to_python
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be a bytes") value = value.decode("utf_8") return value
python
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be a bytes") value = value.decode("utf_8") return value
[ "def", "value_to_python", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "bytes", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"should be a bytes\"", ")", "value", "=", "value", ".", "decode", "(", "\"utf_8\"", ")", "return", "value" ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
[ "Converts", "the", "input", "single", "value", "into", "the", "expected", "Python", "data", "type", "raising", "django", ".", "core", ".", "exceptions", ".", "ValidationError", "if", "the", "data", "can", "t", "be", "converted", ".", "Returns", "the", "converted", "value", ".", "Subclasses", "should", "override", "this", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L201-L211
Karaage-Cluster/python-tldap
tldap/fields.py
CharField.value_validate
def value_validate(self, value): """ Validates value and throws ValidationError. Subclasses should override this to provide validation logic. """ if not isinstance(value, six.string_types): raise tldap.exceptions.ValidationError("should be a string")
python
def value_validate(self, value): """ Validates value and throws ValidationError. Subclasses should override this to provide validation logic. """ if not isinstance(value, six.string_types): raise tldap.exceptions.ValidationError("should be a string")
[ "def", "value_validate", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"should be a string\"", ")" ]
Validates value and throws ValidationError. Subclasses should override this to provide validation logic.
[ "Validates", "value", "and", "throws", "ValidationError", ".", "Subclasses", "should", "override", "this", "to", "provide", "validation", "logic", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L213-L219
Karaage-Cluster/python-tldap
tldap/fields.py
IntegerField.value_to_python
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be bytes") if value is None: return value try: return int(value) except (TypeError, ValueError): raise tldap.exceptions.ValidationError("is invalid integer")
python
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be bytes") if value is None: return value try: return int(value) except (TypeError, ValueError): raise tldap.exceptions.ValidationError("is invalid integer")
[ "def", "value_to_python", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "bytes", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"should be bytes\"", ")", "if", "value", "is", "None", ":", "return", "value", "try", ":", "return", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"is invalid integer\"", ")" ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
[ "Converts", "the", "input", "single", "value", "into", "the", "expected", "Python", "data", "type", "raising", "django", ".", "core", ".", "exceptions", ".", "ValidationError", "if", "the", "data", "can", "t", "be", "converted", ".", "Returns", "the", "converted", "value", ".", "Subclasses", "should", "override", "this", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L254-L268
Karaage-Cluster/python-tldap
tldap/fields.py
IntegerField.value_to_db
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, six.integer_types) return str(value).encode("utf_8")
python
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, six.integer_types) return str(value).encode("utf_8")
[ "def", "value_to_db", "(", "self", ",", "value", ")", ":", "assert", "isinstance", "(", "value", ",", "six", ".", "integer_types", ")", "return", "str", "(", "value", ")", ".", "encode", "(", "\"utf_8\"", ")" ]
Returns field's single value prepared for saving into a database.
[ "Returns", "field", "s", "single", "value", "prepared", "for", "saving", "into", "a", "database", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L270-L273
Karaage-Cluster/python-tldap
tldap/fields.py
IntegerField.value_validate
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, six.integer_types): raise tldap.exceptions.ValidationError("should be a integer") try: return str(value) except (TypeError, ValueError): raise tldap.exceptions.ValidationError("is invalid integer")
python
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, six.integer_types): raise tldap.exceptions.ValidationError("should be a integer") try: return str(value) except (TypeError, ValueError): raise tldap.exceptions.ValidationError("is invalid integer")
[ "def", "value_validate", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "six", ".", "integer_types", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"should be a integer\"", ")", "try", ":", "return", "str", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"is invalid integer\"", ")" ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
[ "Converts", "the", "input", "single", "value", "into", "the", "expected", "Python", "data", "type", "raising", "django", ".", "core", ".", "exceptions", ".", "ValidationError", "if", "the", "data", "can", "t", "be", "converted", ".", "Returns", "the", "converted", "value", ".", "Subclasses", "should", "override", "this", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L275-L288
Karaage-Cluster/python-tldap
tldap/fields.py
DaysSinceEpochField.value_to_python
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be a bytes") try: value = int(value) except (TypeError, ValueError): raise tldap.exceptions.ValidationError("is invalid integer") try: value = datetime.date.fromtimestamp(value * 24 * 60 * 60) except OverflowError: raise tldap.exceptions.ValidationError("is too big a date") return value
python
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be a bytes") try: value = int(value) except (TypeError, ValueError): raise tldap.exceptions.ValidationError("is invalid integer") try: value = datetime.date.fromtimestamp(value * 24 * 60 * 60) except OverflowError: raise tldap.exceptions.ValidationError("is too big a date") return value
[ "def", "value_to_python", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "bytes", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"should be a bytes\"", ")", "try", ":", "value", "=", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"is invalid integer\"", ")", "try", ":", "value", "=", "datetime", ".", "date", ".", "fromtimestamp", "(", "value", "*", "24", "*", "60", "*", "60", ")", "except", "OverflowError", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"is too big a date\"", ")", "return", "value" ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
[ "Converts", "the", "input", "single", "value", "into", "the", "expected", "Python", "data", "type", "raising", "django", ".", "core", ".", "exceptions", ".", "ValidationError", "if", "the", "data", "can", "t", "be", "converted", ".", "Returns", "the", "converted", "value", ".", "Subclasses", "should", "override", "this", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L294-L314
Karaage-Cluster/python-tldap
tldap/fields.py
DaysSinceEpochField.value_to_db
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, datetime.date) assert not isinstance(value, datetime.datetime) try: value = value - datetime.date(year=1970, month=1, day=1) except OverflowError: raise tldap.exceptions.ValidationError("is too big a date") return str(value.days).encode("utf_8")
python
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, datetime.date) assert not isinstance(value, datetime.datetime) try: value = value - datetime.date(year=1970, month=1, day=1) except OverflowError: raise tldap.exceptions.ValidationError("is too big a date") return str(value.days).encode("utf_8")
[ "def", "value_to_db", "(", "self", ",", "value", ")", ":", "assert", "isinstance", "(", "value", ",", "datetime", ".", "date", ")", "assert", "not", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", "try", ":", "value", "=", "value", "-", "datetime", ".", "date", "(", "year", "=", "1970", ",", "month", "=", "1", ",", "day", "=", "1", ")", "except", "OverflowError", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"is too big a date\"", ")", "return", "str", "(", "value", ".", "days", ")", ".", "encode", "(", "\"utf_8\"", ")" ]
Returns field's single value prepared for saving into a database.
[ "Returns", "field", "s", "single", "value", "prepared", "for", "saving", "into", "a", "database", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L316-L326
Karaage-Cluster/python-tldap
tldap/fields.py
DaysSinceEpochField.value_validate
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, datetime.date): raise tldap.exceptions.ValidationError("is invalid date") # a datetime is also a date but they are not compatable if isinstance(value, datetime.datetime): raise tldap.exceptions.ValidationError("should be a date, not a datetime")
python
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, datetime.date): raise tldap.exceptions.ValidationError("is invalid date") # a datetime is also a date but they are not compatable if isinstance(value, datetime.datetime): raise tldap.exceptions.ValidationError("should be a date, not a datetime")
[ "def", "value_validate", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "datetime", ".", "date", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"is invalid date\"", ")", "# a datetime is also a date but they are not compatable", "if", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"should be a date, not a datetime\"", ")" ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
[ "Converts", "the", "input", "single", "value", "into", "the", "expected", "Python", "data", "type", "raising", "django", ".", "core", ".", "exceptions", ".", "ValidationError", "if", "the", "data", "can", "t", "be", "converted", ".", "Returns", "the", "converted", "value", ".", "Subclasses", "should", "override", "this", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L328-L339
Karaage-Cluster/python-tldap
tldap/fields.py
SecondsSinceEpochField.value_to_db
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, datetime.datetime) try: value = value - datetime.datetime(1970, 1, 1) except OverflowError: raise tldap.exceptions.ValidationError("is too big a date") value = value.seconds + value.days * 24 * 3600 value = str(value).encode("utf_8") return value
python
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, datetime.datetime) try: value = value - datetime.datetime(1970, 1, 1) except OverflowError: raise tldap.exceptions.ValidationError("is too big a date") value = value.seconds + value.days * 24 * 3600 value = str(value).encode("utf_8") return value
[ "def", "value_to_db", "(", "self", ",", "value", ")", ":", "assert", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", "try", ":", "value", "=", "value", "-", "datetime", ".", "datetime", "(", "1970", ",", "1", ",", "1", ")", "except", "OverflowError", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"is too big a date\"", ")", "value", "=", "value", ".", "seconds", "+", "value", ".", "days", "*", "24", "*", "3600", "value", "=", "str", "(", "value", ")", ".", "encode", "(", "\"utf_8\"", ")", "return", "value" ]
Returns field's single value prepared for saving into a database.
[ "Returns", "field", "s", "single", "value", "prepared", "for", "saving", "into", "a", "database", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L367-L379
Karaage-Cluster/python-tldap
tldap/fields.py
SecondsSinceEpochField.value_validate
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, datetime.datetime): raise tldap.exceptions.ValidationError("is invalid date time")
python
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, datetime.datetime): raise tldap.exceptions.ValidationError("is invalid date time")
[ "def", "value_validate", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"is invalid date time\"", ")" ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
[ "Converts", "the", "input", "single", "value", "into", "the", "expected", "Python", "data", "type", "raising", "django", ".", "core", ".", "exceptions", ".", "ValidationError", "if", "the", "data", "can", "t", "be", "converted", ".", "Returns", "the", "converted", "value", ".", "Subclasses", "should", "override", "this", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L381-L389
Karaage-Cluster/python-tldap
tldap/fields.py
SidField.value_to_python
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be a bytes") length = len(value) - 8 if length % 4 != 0: raise tldap.exceptions.ValidationError("Invalid sid") length = length // 4 array = struct.unpack('<bbbbbbbb' + 'I' * length, value) if array[1] != length: raise tldap.exceptions.ValidationError("Invalid sid") if array[2:7] != (0, 0, 0, 0, 0): raise tldap.exceptions.ValidationError("Invalid sid") array = ("S", ) + array[0:1] + array[7:] return "-".join([str(i) for i in array])
python
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be a bytes") length = len(value) - 8 if length % 4 != 0: raise tldap.exceptions.ValidationError("Invalid sid") length = length // 4 array = struct.unpack('<bbbbbbbb' + 'I' * length, value) if array[1] != length: raise tldap.exceptions.ValidationError("Invalid sid") if array[2:7] != (0, 0, 0, 0, 0): raise tldap.exceptions.ValidationError("Invalid sid") array = ("S", ) + array[0:1] + array[7:] return "-".join([str(i) for i in array])
[ "def", "value_to_python", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "bytes", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"should be a bytes\"", ")", "length", "=", "len", "(", "value", ")", "-", "8", "if", "length", "%", "4", "!=", "0", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"Invalid sid\"", ")", "length", "=", "length", "//", "4", "array", "=", "struct", ".", "unpack", "(", "'<bbbbbbbb'", "+", "'I'", "*", "length", ",", "value", ")", "if", "array", "[", "1", "]", "!=", "length", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"Invalid sid\"", ")", "if", "array", "[", "2", ":", "7", "]", "!=", "(", "0", ",", "0", ",", "0", ",", "0", ",", "0", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"Invalid sid\"", ")", "array", "=", "(", "\"S\"", ",", ")", "+", "array", "[", "0", ":", "1", "]", "+", "array", "[", "7", ":", "]", "return", "\"-\"", ".", "join", "(", "[", "str", "(", "i", ")", "for", "i", "in", "array", "]", ")" ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
[ "Converts", "the", "input", "single", "value", "into", "the", "expected", "Python", "data", "type", "raising", "django", ".", "core", ".", "exceptions", ".", "ValidationError", "if", "the", "data", "can", "t", "be", "converted", ".", "Returns", "the", "converted", "value", ".", "Subclasses", "should", "override", "this", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L395-L420
Karaage-Cluster/python-tldap
tldap/fields.py
SidField.value_to_db
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, str) array = value.split("-") length = len(array) - 3 assert length >= 0 assert array[0] == 'S' array = array[1:2] + [length, 0, 0, 0, 0, 0] + array[2:] array = [int(i) for i in array] return struct.pack('<bbbbbbbb' + 'I' * length, *array)
python
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, str) array = value.split("-") length = len(array) - 3 assert length >= 0 assert array[0] == 'S' array = array[1:2] + [length, 0, 0, 0, 0, 0] + array[2:] array = [int(i) for i in array] return struct.pack('<bbbbbbbb' + 'I' * length, *array)
[ "def", "value_to_db", "(", "self", ",", "value", ")", ":", "assert", "isinstance", "(", "value", ",", "str", ")", "array", "=", "value", ".", "split", "(", "\"-\"", ")", "length", "=", "len", "(", "array", ")", "-", "3", "assert", "length", ">=", "0", "assert", "array", "[", "0", "]", "==", "'S'", "array", "=", "array", "[", "1", ":", "2", "]", "+", "[", "length", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", "]", "+", "array", "[", "2", ":", "]", "array", "=", "[", "int", "(", "i", ")", "for", "i", "in", "array", "]", "return", "struct", ".", "pack", "(", "'<bbbbbbbb'", "+", "'I'", "*", "length", ",", "*", "array", ")" ]
Returns field's single value prepared for saving into a database.
[ "Returns", "field", "s", "single", "value", "prepared", "for", "saving", "into", "a", "database", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L422-L436
Karaage-Cluster/python-tldap
tldap/fields.py
SidField.value_validate
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, str): raise tldap.exceptions.ValidationError("Invalid sid") array = value.split("-") length = len(array) - 3 if length < 1: raise tldap.exceptions.ValidationError("Invalid sid") if array.pop(0) != "S": raise tldap.exceptions.ValidationError("Invalid sid") try: [int(i) for i in array] except TypeError: raise tldap.exceptions.ValidationError("Invalid sid")
python
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, str): raise tldap.exceptions.ValidationError("Invalid sid") array = value.split("-") length = len(array) - 3 if length < 1: raise tldap.exceptions.ValidationError("Invalid sid") if array.pop(0) != "S": raise tldap.exceptions.ValidationError("Invalid sid") try: [int(i) for i in array] except TypeError: raise tldap.exceptions.ValidationError("Invalid sid")
[ "def", "value_validate", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "str", ")", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"Invalid sid\"", ")", "array", "=", "value", ".", "split", "(", "\"-\"", ")", "length", "=", "len", "(", "array", ")", "-", "3", "if", "length", "<", "1", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"Invalid sid\"", ")", "if", "array", ".", "pop", "(", "0", ")", "!=", "\"S\"", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"Invalid sid\"", ")", "try", ":", "[", "int", "(", "i", ")", "for", "i", "in", "array", "]", "except", "TypeError", ":", "raise", "tldap", ".", "exceptions", ".", "ValidationError", "(", "\"Invalid sid\"", ")" ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
[ "Converts", "the", "input", "single", "value", "into", "the", "expected", "Python", "data", "type", "raising", "django", ".", "core", ".", "exceptions", ".", "ValidationError", "if", "the", "data", "can", "t", "be", "converted", ".", "Returns", "the", "converted", "value", ".", "Subclasses", "should", "override", "this", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/fields.py#L438-L460
quantmind/agile-toolkit
agiletoolkit/api/components.py
RepoComponents.get
def get(self, id): """Get data for this component """ id = self.as_id(id) url = '%s/%s' % (self, id) response = self.http.get(url, auth=self.auth) response.raise_for_status() return response.json()
python
def get(self, id): """Get data for this component """ id = self.as_id(id) url = '%s/%s' % (self, id) response = self.http.get(url, auth=self.auth) response.raise_for_status() return response.json()
[ "def", "get", "(", "self", ",", "id", ")", ":", "id", "=", "self", ".", "as_id", "(", "id", ")", "url", "=", "'%s/%s'", "%", "(", "self", ",", "id", ")", "response", "=", "self", ".", "http", ".", "get", "(", "url", ",", "auth", "=", "self", ".", "auth", ")", "response", ".", "raise_for_status", "(", ")", "return", "response", ".", "json", "(", ")" ]
Get data for this component
[ "Get", "data", "for", "this", "component" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/components.py#L38-L45
quantmind/agile-toolkit
agiletoolkit/api/components.py
RepoComponents.create
def create(self, data): """Create a new component """ response = self.http.post(str(self), json=data, auth=self.auth) response.raise_for_status() return response.json()
python
def create(self, data): """Create a new component """ response = self.http.post(str(self), json=data, auth=self.auth) response.raise_for_status() return response.json()
[ "def", "create", "(", "self", ",", "data", ")", ":", "response", "=", "self", ".", "http", ".", "post", "(", "str", "(", "self", ")", ",", "json", "=", "data", ",", "auth", "=", "self", ".", "auth", ")", "response", ".", "raise_for_status", "(", ")", "return", "response", ".", "json", "(", ")" ]
Create a new component
[ "Create", "a", "new", "component" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/components.py#L47-L52
quantmind/agile-toolkit
agiletoolkit/api/components.py
RepoComponents.update
def update(self, id, data): """Update a component """ id = self.as_id(id) response = self.http.patch( '%s/%s' % (self, id), json=data, auth=self.auth ) response.raise_for_status() return response.json()
python
def update(self, id, data): """Update a component """ id = self.as_id(id) response = self.http.patch( '%s/%s' % (self, id), json=data, auth=self.auth ) response.raise_for_status() return response.json()
[ "def", "update", "(", "self", ",", "id", ",", "data", ")", ":", "id", "=", "self", ".", "as_id", "(", "id", ")", "response", "=", "self", ".", "http", ".", "patch", "(", "'%s/%s'", "%", "(", "self", ",", "id", ")", ",", "json", "=", "data", ",", "auth", "=", "self", ".", "auth", ")", "response", ".", "raise_for_status", "(", ")", "return", "response", ".", "json", "(", ")" ]
Update a component
[ "Update", "a", "component" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/components.py#L54-L62
quantmind/agile-toolkit
agiletoolkit/api/components.py
RepoComponents.delete
def delete(self, id): """Delete a component by id """ id = self.as_id(id) response = self.http.delete( '%s/%s' % (self.api_url, id), auth=self.auth) response.raise_for_status()
python
def delete(self, id): """Delete a component by id """ id = self.as_id(id) response = self.http.delete( '%s/%s' % (self.api_url, id), auth=self.auth) response.raise_for_status()
[ "def", "delete", "(", "self", ",", "id", ")", ":", "id", "=", "self", ".", "as_id", "(", "id", ")", "response", "=", "self", ".", "http", ".", "delete", "(", "'%s/%s'", "%", "(", "self", ".", "api_url", ",", "id", ")", ",", "auth", "=", "self", ".", "auth", ")", "response", ".", "raise_for_status", "(", ")" ]
Delete a component by id
[ "Delete", "a", "component", "by", "id" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/components.py#L64-L71
quantmind/agile-toolkit
agiletoolkit/api/components.py
RepoComponents.get_list
def get_list(self, url=None, callback=None, limit=100, **data): """Get a list of this github component :param url: full url :param Comp: a :class:`.Component` class :param callback: Optional callback :param limit: Optional number of items to retrieve :param data: additional query data :return: a list of ``Comp`` objects with data """ url = url or str(self) data = dict(((k, v) for k, v in data.items() if v)) all_data = [] if limit: data['per_page'] = min(limit, 100) while url: response = self.http.get(url, params=data, auth=self.auth) response.raise_for_status() result = response.json() n = m = len(result) if callback: result = callback(result) m = len(result) all_data.extend(result) if limit and len(all_data) > limit: all_data = all_data[:limit] break elif m == n: data = None next = response.links.get('next', {}) url = next.get('url') else: break return all_data
python
def get_list(self, url=None, callback=None, limit=100, **data): """Get a list of this github component :param url: full url :param Comp: a :class:`.Component` class :param callback: Optional callback :param limit: Optional number of items to retrieve :param data: additional query data :return: a list of ``Comp`` objects with data """ url = url or str(self) data = dict(((k, v) for k, v in data.items() if v)) all_data = [] if limit: data['per_page'] = min(limit, 100) while url: response = self.http.get(url, params=data, auth=self.auth) response.raise_for_status() result = response.json() n = m = len(result) if callback: result = callback(result) m = len(result) all_data.extend(result) if limit and len(all_data) > limit: all_data = all_data[:limit] break elif m == n: data = None next = response.links.get('next', {}) url = next.get('url') else: break return all_data
[ "def", "get_list", "(", "self", ",", "url", "=", "None", ",", "callback", "=", "None", ",", "limit", "=", "100", ",", "*", "*", "data", ")", ":", "url", "=", "url", "or", "str", "(", "self", ")", "data", "=", "dict", "(", "(", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "data", ".", "items", "(", ")", "if", "v", ")", ")", "all_data", "=", "[", "]", "if", "limit", ":", "data", "[", "'per_page'", "]", "=", "min", "(", "limit", ",", "100", ")", "while", "url", ":", "response", "=", "self", ".", "http", ".", "get", "(", "url", ",", "params", "=", "data", ",", "auth", "=", "self", ".", "auth", ")", "response", ".", "raise_for_status", "(", ")", "result", "=", "response", ".", "json", "(", ")", "n", "=", "m", "=", "len", "(", "result", ")", "if", "callback", ":", "result", "=", "callback", "(", "result", ")", "m", "=", "len", "(", "result", ")", "all_data", ".", "extend", "(", "result", ")", "if", "limit", "and", "len", "(", "all_data", ")", ">", "limit", ":", "all_data", "=", "all_data", "[", ":", "limit", "]", "break", "elif", "m", "==", "n", ":", "data", "=", "None", "next", "=", "response", ".", "links", ".", "get", "(", "'next'", ",", "{", "}", ")", "url", "=", "next", ".", "get", "(", "'url'", ")", "else", ":", "break", "return", "all_data" ]
Get a list of this github component :param url: full url :param Comp: a :class:`.Component` class :param callback: Optional callback :param limit: Optional number of items to retrieve :param data: additional query data :return: a list of ``Comp`` objects with data
[ "Get", "a", "list", "of", "this", "github", "component", ":", "param", "url", ":", "full", "url", ":", "param", "Comp", ":", "a", ":", "class", ":", ".", "Component", "class", ":", "param", "callback", ":", "Optional", "callback", ":", "param", "limit", ":", "Optional", "number", "of", "items", "to", "retrieve", ":", "param", "data", ":", "additional", "query", "data", ":", "return", ":", "a", "list", "of", "Comp", "objects", "with", "data" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/components.py#L73-L105
quantmind/agile-toolkit
agiletoolkit/api/components.py
Issues.comments
def comments(self, issue): """Return all comments for this issue/pull request """ commit = self.as_id(issue) return self.get_list(url='%s/%s/comments' % (self, commit))
python
def comments(self, issue): """Return all comments for this issue/pull request """ commit = self.as_id(issue) return self.get_list(url='%s/%s/comments' % (self, commit))
[ "def", "comments", "(", "self", ",", "issue", ")", ":", "commit", "=", "self", ".", "as_id", "(", "issue", ")", "return", "self", ".", "get_list", "(", "url", "=", "'%s/%s/comments'", "%", "(", "self", ",", "commit", ")", ")" ]
Return all comments for this issue/pull request
[ "Return", "all", "comments", "for", "this", "issue", "/", "pull", "request" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/components.py#L139-L143
developersociety/django-glitter
glitter/admin.py
GlitterAdminMixin.has_edit_permission
def has_edit_permission(self, request, obj=None, version=None): """ Returns a boolean if the user in the request has edit permission for the object. Can also be passed a version object to check if the user has permission to edit a version of the object (if they own it). """ # Has the edit permission for this object type permission_name = '{}.edit_{}'.format(self.opts.app_label, self.opts.model_name) has_permission = request.user.has_perm(permission_name) if obj is not None and has_permission is False: has_permission = request.user.has_perm(permission_name, obj=obj) if has_permission and version is not None: # Version must not be saved, and must belong to this user if version.version_number or version.owner != request.user: has_permission = False return has_permission
python
def has_edit_permission(self, request, obj=None, version=None): """ Returns a boolean if the user in the request has edit permission for the object. Can also be passed a version object to check if the user has permission to edit a version of the object (if they own it). """ # Has the edit permission for this object type permission_name = '{}.edit_{}'.format(self.opts.app_label, self.opts.model_name) has_permission = request.user.has_perm(permission_name) if obj is not None and has_permission is False: has_permission = request.user.has_perm(permission_name, obj=obj) if has_permission and version is not None: # Version must not be saved, and must belong to this user if version.version_number or version.owner != request.user: has_permission = False return has_permission
[ "def", "has_edit_permission", "(", "self", ",", "request", ",", "obj", "=", "None", ",", "version", "=", "None", ")", ":", "# Has the edit permission for this object type", "permission_name", "=", "'{}.edit_{}'", ".", "format", "(", "self", ".", "opts", ".", "app_label", ",", "self", ".", "opts", ".", "model_name", ")", "has_permission", "=", "request", ".", "user", ".", "has_perm", "(", "permission_name", ")", "if", "obj", "is", "not", "None", "and", "has_permission", "is", "False", ":", "has_permission", "=", "request", ".", "user", ".", "has_perm", "(", "permission_name", ",", "obj", "=", "obj", ")", "if", "has_permission", "and", "version", "is", "not", "None", ":", "# Version must not be saved, and must belong to this user", "if", "version", ".", "version_number", "or", "version", ".", "owner", "!=", "request", ".", "user", ":", "has_permission", "=", "False", "return", "has_permission" ]
Returns a boolean if the user in the request has edit permission for the object. Can also be passed a version object to check if the user has permission to edit a version of the object (if they own it).
[ "Returns", "a", "boolean", "if", "the", "user", "in", "the", "request", "has", "edit", "permission", "for", "the", "object", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/admin.py#L120-L139
developersociety/django-glitter
glitter/admin.py
GlitterAdminMixin.has_publish_permission
def has_publish_permission(self, request, obj=None): """ Returns a boolean if the user in the request has publish permission for the object. """ permission_name = '{}.publish_{}'.format(self.opts.app_label, self.opts.model_name) has_permission = request.user.has_perm(permission_name) if obj is not None and has_permission is False: has_permission = request.user.has_perm(permission_name, obj=obj) return has_permission
python
def has_publish_permission(self, request, obj=None): """ Returns a boolean if the user in the request has publish permission for the object. """ permission_name = '{}.publish_{}'.format(self.opts.app_label, self.opts.model_name) has_permission = request.user.has_perm(permission_name) if obj is not None and has_permission is False: has_permission = request.user.has_perm(permission_name, obj=obj) return has_permission
[ "def", "has_publish_permission", "(", "self", ",", "request", ",", "obj", "=", "None", ")", ":", "permission_name", "=", "'{}.publish_{}'", ".", "format", "(", "self", ".", "opts", ".", "app_label", ",", "self", ".", "opts", ".", "model_name", ")", "has_permission", "=", "request", ".", "user", ".", "has_perm", "(", "permission_name", ")", "if", "obj", "is", "not", "None", "and", "has_permission", "is", "False", ":", "has_permission", "=", "request", ".", "user", ".", "has_perm", "(", "permission_name", ",", "obj", "=", "obj", ")", "return", "has_permission" ]
Returns a boolean if the user in the request has publish permission for the object.
[ "Returns", "a", "boolean", "if", "the", "user", "in", "the", "request", "has", "publish", "permission", "for", "the", "object", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/admin.py#L141-L151
quantmind/agile-toolkit
agiletoolkit/utils.py
semantic_version
def semantic_version(tag): """Get a valid semantic version for tag """ try: version = list(map(int, tag.split('.'))) assert len(version) == 3 return tuple(version) except Exception as exc: raise CommandError( 'Could not parse "%s", please use ' 'MAJOR.MINOR.PATCH' % tag ) from exc
python
def semantic_version(tag): """Get a valid semantic version for tag """ try: version = list(map(int, tag.split('.'))) assert len(version) == 3 return tuple(version) except Exception as exc: raise CommandError( 'Could not parse "%s", please use ' 'MAJOR.MINOR.PATCH' % tag ) from exc
[ "def", "semantic_version", "(", "tag", ")", ":", "try", ":", "version", "=", "list", "(", "map", "(", "int", ",", "tag", ".", "split", "(", "'.'", ")", ")", ")", "assert", "len", "(", "version", ")", "==", "3", "return", "tuple", "(", "version", ")", "except", "Exception", "as", "exc", ":", "raise", "CommandError", "(", "'Could not parse \"%s\", please use '", "'MAJOR.MINOR.PATCH'", "%", "tag", ")", "from", "exc" ]
Get a valid semantic version for tag
[ "Get", "a", "valid", "semantic", "version", "for", "tag" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/utils.py#L99-L110
davidblaisonneau-orange/foreman
foreman/item.py
ForemanItem.load
def load(self, data): """ Function load Store the object data """ self.clear() self.update(data) self.enhance()
python
def load(self, data): """ Function load Store the object data """ self.clear() self.update(data) self.enhance()
[ "def", "load", "(", "self", ",", "data", ")", ":", "self", ".", "clear", "(", ")", "self", ".", "update", "(", "data", ")", "self", ".", "enhance", "(", ")" ]
Function load Store the object data
[ "Function", "load", "Store", "the", "object", "data" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/item.py#L53-L59
davidblaisonneau-orange/foreman
foreman/item.py
ForemanItem.enhance
def enhance(self): """ Function enhance Enhance the object with new item or enhanced items """ if self.objName in ['hosts', 'hostgroups', 'puppet_classes']: from foreman.itemSmartClassParameter\ import ItemSmartClassParameter self.update({'smart_class_parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, ItemSmartClassParameter)})
python
def enhance(self): """ Function enhance Enhance the object with new item or enhanced items """ if self.objName in ['hosts', 'hostgroups', 'puppet_classes']: from foreman.itemSmartClassParameter\ import ItemSmartClassParameter self.update({'smart_class_parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, ItemSmartClassParameter)})
[ "def", "enhance", "(", "self", ")", ":", "if", "self", ".", "objName", "in", "[", "'hosts'", ",", "'hostgroups'", ",", "'puppet_classes'", "]", ":", "from", "foreman", ".", "itemSmartClassParameter", "import", "ItemSmartClassParameter", "self", ".", "update", "(", "{", "'smart_class_parameters'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "ItemSmartClassParameter", ")", "}", ")" ]
Function enhance Enhance the object with new item or enhanced items
[ "Function", "enhance", "Enhance", "the", "object", "with", "new", "item", "or", "enhanced", "items" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/item.py#L61-L72
davidblaisonneau-orange/foreman
foreman/item.py
ForemanItem.reload
def reload(self): """ Function reload Sync the full object """ self.load(self.api.get(self.objName, self.key))
python
def reload(self): """ Function reload Sync the full object """ self.load(self.api.get(self.objName, self.key))
[ "def", "reload", "(", "self", ")", ":", "self", ".", "load", "(", "self", ".", "api", ".", "get", "(", "self", ".", "objName", ",", "self", ".", "key", ")", ")" ]
Function reload Sync the full object
[ "Function", "reload", "Sync", "the", "full", "object" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/item.py#L74-L78