| repository_name (string, lengths 7-55) | func_path_in_repository (string, lengths 4-223) | func_name (string, lengths 1-134) | whole_func_string (string, lengths 75-104k) | language (string, 1 value) | func_code_string (string, lengths 75-104k) | func_code_tokens (sequence, lengths 19-28.4k) | func_documentation_string (string, lengths 1-46.9k) | func_documentation_tokens (sequence, lengths 1-1.97k) | split_name (string, 1 value) | func_code_url (string, lengths 87-315) |
|---|---|---|---|---|---|---|---|---|---|---|
fchorney/rpI2C | rpI2C.py | I2C.write_block_data | def write_block_data(self, cmd, block):
"""
Writes a block of bytes to the bus using I2C format to the specified
command register
"""
self.bus.write_i2c_block_data(self.address, cmd, block)
self.log.debug(
"write_block_data: Wrote [%s] to command register 0x%02X" % (
', '.join(['0x%02X' % x for x in block]),
cmd
)
) | python | def write_block_data(self, cmd, block):
"""
Writes a block of bytes to the bus using I2C format to the specified
command register
"""
self.bus.write_i2c_block_data(self.address, cmd, block)
self.log.debug(
"write_block_data: Wrote [%s] to command register 0x%02X" % (
', '.join(['0x%02X' % x for x in block]),
cmd
)
) | [
"def",
"write_block_data",
"(",
"self",
",",
"cmd",
",",
"block",
")",
":",
"self",
".",
"bus",
".",
"write_i2c_block_data",
"(",
"self",
".",
"address",
",",
"cmd",
",",
"block",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"write_block_data: Wrote [%s] to command register 0x%02X\"",
"%",
"(",
"', '",
".",
"join",
"(",
"[",
"'0x%02X'",
"%",
"x",
"for",
"x",
"in",
"block",
"]",
")",
",",
"cmd",
")",
")"
] | Writes a block of bytes to the bus using I2C format to the specified
command register | [
"Writes",
"a",
"block",
"of",
"bytes",
"to",
"the",
"bus",
"using",
"I2C",
"format",
"to",
"the",
"specified",
"command",
"register"
] | train | https://github.com/fchorney/rpI2C/blob/7c60f82fa8c91496a74182373da0b95a13919d6e/rpI2C.py#L75-L86 |
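A minimal usage sketch for the `write_block_data` row above. The bus number, device address (0x48) and command register (0x01) are hypothetical placeholders, and the `smbus` module (python-smbus on a Raspberry Pi) is assumed to be installed; none of this comes from the dataset row itself.

```python
import smbus  # assumed available (python-smbus on a Raspberry Pi)

bus = smbus.SMBus(1)   # bus 1 on most modern Raspberry Pi boards
ADDRESS = 0x48         # hypothetical I2C device address
CMD_REGISTER = 0x01    # hypothetical command register

# The same call the wrapper makes internally: write three bytes to the register.
bus.write_i2c_block_data(ADDRESS, CMD_REGISTER, [0x0A, 0x0B, 0x0C])
```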
fchorney/rpI2C | rpI2C.py | I2C.read_raw_byte | def read_raw_byte(self):
"""
Read an 8-bit byte directly from the bus
"""
result = self.bus.read_byte(self.address)
self.log.debug("read_raw_byte: Read 0x%02X from the bus" % result)
return result | python | def read_raw_byte(self):
"""
Read an 8-bit byte directly from the bus
"""
result = self.bus.read_byte(self.address)
self.log.debug("read_raw_byte: Read 0x%02X from the bus" % result)
return result | [
"def",
"read_raw_byte",
"(",
"self",
")",
":",
"result",
"=",
"self",
".",
"bus",
".",
"read_byte",
"(",
"self",
".",
"address",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"read_raw_byte: Read 0x%02X from the bus\"",
"%",
"result",
")",
"return",
"result"
] | Read an 8-bit byte directly from the bus | [
"Read",
"an",
"8",
"-",
"bit",
"byte",
"directly",
"from",
"the",
"bus"
] | train | https://github.com/fchorney/rpI2C/blob/7c60f82fa8c91496a74182373da0b95a13919d6e/rpI2C.py#L90-L96 |
fchorney/rpI2C | rpI2C.py | I2C.read_block_data | def read_block_data(self, cmd, length):
"""
Read a block of bytes from the bus from the specified command register
Amount of bytes read in is defined by length
"""
results = self.bus.read_i2c_block_data(self.address, cmd, length)
self.log.debug(
"read_block_data: Read [%s] from command register 0x%02X" % (
', '.join(['0x%02X' % x for x in results]),
cmd
)
)
return results | python | def read_block_data(self, cmd, length):
"""
Read a block of bytes from the bus from the specified command register
Amount of bytes read in is defined by length
"""
results = self.bus.read_i2c_block_data(self.address, cmd, length)
self.log.debug(
"read_block_data: Read [%s] from command register 0x%02X" % (
', '.join(['0x%02X' % x for x in results]),
cmd
)
)
return results | [
"def",
"read_block_data",
"(",
"self",
",",
"cmd",
",",
"length",
")",
":",
"results",
"=",
"self",
".",
"bus",
".",
"read_i2c_block_data",
"(",
"self",
".",
"address",
",",
"cmd",
",",
"length",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"read_block_data: Read [%s] from command register 0x%02X\"",
"%",
"(",
"', '",
".",
"join",
"(",
"[",
"'0x%02X'",
"%",
"x",
"for",
"x",
"in",
"results",
"]",
")",
",",
"cmd",
")",
")",
"return",
"results"
] | Read a block of bytes from the bus from the specified command register
Amount of bytes read in is defined by length | [
"Read",
"a",
"block",
"of",
"bytes",
"from",
"the",
"bus",
"from",
"the",
"specified",
"command",
"register",
"Amount",
"of",
"bytes",
"read",
"in",
"is",
"defined",
"by",
"length"
] | train | https://github.com/fchorney/rpI2C/blob/7c60f82fa8c91496a74182373da0b95a13919d6e/rpI2C.py#L98-L110 |
fchorney/rpI2C | rpI2C.py | I2C.read_unsigned_byte | def read_unsigned_byte(self, cmd):
"""
Read an unsigned byte from the specified command register
"""
result = self.bus.read_byte_data(self.address, cmd)
self.log.debug(
"read_unsigned_byte: Read 0x%02X from command register 0x%02X" % (
result, cmd
)
)
return result | python | def read_unsigned_byte(self, cmd):
"""
Read an unsigned byte from the specified command register
"""
result = self.bus.read_byte_data(self.address, cmd)
self.log.debug(
"read_unsigned_byte: Read 0x%02X from command register 0x%02X" % (
result, cmd
)
)
return result | [
"def",
"read_unsigned_byte",
"(",
"self",
",",
"cmd",
")",
":",
"result",
"=",
"self",
".",
"bus",
".",
"read_byte_data",
"(",
"self",
".",
"address",
",",
"cmd",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"read_unsigned_byte: Read 0x%02X from command register 0x%02X\"",
"%",
"(",
"result",
",",
"cmd",
")",
")",
"return",
"result"
] | Read an unsigned byte from the specified command register | [
"Read",
"an",
"unsigned",
"byte",
"from",
"the",
"specified",
"command",
"register"
] | train | https://github.com/fchorney/rpI2C/blob/7c60f82fa8c91496a74182373da0b95a13919d6e/rpI2C.py#L112-L122 |
fchorney/rpI2C | rpI2C.py | I2C.read_unsigned_word | def read_unsigned_word(self, cmd, little_endian=True):
"""
Read an unsigned word from the specified command register
We assume the data is in little endian mode, if it is in big endian
mode then set little_endian to False
"""
result = self.bus.read_word_data(self.address, cmd)
if not little_endian:
result = ((result << 8) & 0xFF00) + (result >> 8)
self.log.debug(
"read_unsigned_word: Read 0x%04X from command register 0x%02X" % (
result, cmd
)
)
return result | python | def read_unsigned_word(self, cmd, little_endian=True):
"""
Read an unsigned word from the specified command register
We assume the data is in little endian mode, if it is in big endian
mode then set little_endian to False
"""
result = self.bus.read_word_data(self.address, cmd)
if not little_endian:
result = ((result << 8) & 0xFF00) + (result >> 8)
self.log.debug(
"read_unsigned_word: Read 0x%04X from command register 0x%02X" % (
result, cmd
)
)
return result | [
"def",
"read_unsigned_word",
"(",
"self",
",",
"cmd",
",",
"little_endian",
"=",
"True",
")",
":",
"result",
"=",
"self",
".",
"bus",
".",
"read_word_data",
"(",
"self",
".",
"address",
",",
"cmd",
")",
"if",
"not",
"little_endian",
":",
"result",
"=",
"(",
"(",
"result",
"<<",
"8",
")",
"&",
"0xFF00",
")",
"+",
"(",
"result",
">>",
"8",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"read_unsigned_word: Read 0x%04X from command register 0x%02X\"",
"%",
"(",
"result",
",",
"cmd",
")",
")",
"return",
"result"
] | Read an unsigned word from the specified command register
We assume the data is in little endian mode, if it is in big endian
mode then set little_endian to False | [
"Read",
"an",
"unsigned",
"word",
"from",
"the",
"specified",
"command",
"register",
"We",
"assume",
"the",
"data",
"is",
"in",
"little",
"endian",
"mode",
"if",
"it",
"is",
"in",
"big",
"endian",
"mode",
"then",
"set",
"little_endian",
"to",
"False"
] | train | https://github.com/fchorney/rpI2C/blob/7c60f82fa8c91496a74182373da0b95a13919d6e/rpI2C.py#L141-L157 |
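The endianness swap in `read_unsigned_word` can be checked in isolation; this snippet reproduces the exact expression from the row above.

```python
word = 0x1234  # value as read_word_data would return it
swapped = ((word << 8) & 0xFF00) + (word >> 8)  # exchange the two bytes
assert swapped == 0x3412
print("0x%04X -> 0x%04X" % (word, swapped))  # 0x1234 -> 0x3412
```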
fchorney/rpI2C | rpI2C.py | I2C.__connect_to_bus | def __connect_to_bus(self, bus):
"""
Attempt to connect to an I2C bus
"""
def connect(bus_num):
try:
self.log.debug("Attempting to connect to bus %s..." % bus_num)
self.bus = smbus.SMBus(bus_num)
self.log.debug("Success")
except IOError:
self.log.debug("Failed")
raise
# If the bus is not explicitly stated, try 0 and then try 1 if that
# fails
if bus is None:
try:
connect(0)
return
except IOError:
pass
try:
connect(1)
return
except IOError:
raise
else:
try:
connect(bus)
return
except IOError:
raise | python | def __connect_to_bus(self, bus):
"""
Attempt to connect to an I2C bus
"""
def connect(bus_num):
try:
self.log.debug("Attempting to connect to bus %s..." % bus_num)
self.bus = smbus.SMBus(bus_num)
self.log.debug("Success")
except IOError:
self.log.debug("Failed")
raise
# If the bus is not explicitly stated, try 0 and then try 1 if that
# fails
if bus is None:
try:
connect(0)
return
except IOError:
pass
try:
connect(1)
return
except IOError:
raise
else:
try:
connect(bus)
return
except IOError:
raise | [
"def",
"__connect_to_bus",
"(",
"self",
",",
"bus",
")",
":",
"def",
"connect",
"(",
"bus_num",
")",
":",
"try",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Attempting to connect to bus %s...\"",
"%",
"bus_num",
")",
"self",
".",
"bus",
"=",
"smbus",
".",
"SMBus",
"(",
"bus_num",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"Success\"",
")",
"except",
"IOError",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Failed\"",
")",
"raise",
"# If the bus is not explicitly stated, try 0 and then try 1 if that",
"# fails",
"if",
"bus",
"is",
"None",
":",
"try",
":",
"connect",
"(",
"0",
")",
"return",
"except",
"IOError",
":",
"pass",
"try",
":",
"connect",
"(",
"1",
")",
"return",
"except",
"IOError",
":",
"raise",
"else",
":",
"try",
":",
"connect",
"(",
"bus",
")",
"return",
"except",
"IOError",
":",
"raise"
] | Attempt to connect to an I2C bus | [
"Attempt",
"to",
"connect",
"to",
"an",
"I2C",
"bus"
] | train | https://github.com/fchorney/rpI2C/blob/7c60f82fa8c91496a74182373da0b95a13919d6e/rpI2C.py#L181-L213 |
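The nested try/except in `__connect_to_bus` amounts to "try bus 0, fall back to bus 1, otherwise re-raise". A loop-based sketch of the same fallback, an illustrative refactor rather than the library's code (`smbus` assumed available):

```python
import smbus

def connect_to_bus(bus=None):
    # If no bus is given, probe 0 then 1, mirroring the original fallback.
    candidates = [bus] if bus is not None else [0, 1]
    last_error = None
    for bus_num in candidates:
        try:
            return smbus.SMBus(bus_num)
        except IOError as error:
            last_error = error
    raise last_error  # re-raise the last failure, as the original does
```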
developersociety/django-glitter | glitter/reminders/admin.py | ReminderInline.get_formset | def get_formset(self, request, obj=None, **kwargs):
""" Default user to the current version owner. """
data = super().get_formset(request, obj, **kwargs)
if obj:
data.form.base_fields['user'].initial = request.user.id
return data | python | def get_formset(self, request, obj=None, **kwargs):
""" Default user to the current version owner. """
data = super().get_formset(request, obj, **kwargs)
if obj:
data.form.base_fields['user'].initial = request.user.id
return data | [
"def",
"get_formset",
"(",
"self",
",",
"request",
",",
"obj",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
"=",
"super",
"(",
")",
".",
"get_formset",
"(",
"request",
",",
"obj",
",",
"*",
"*",
"kwargs",
")",
"if",
"obj",
":",
"data",
".",
"form",
".",
"base_fields",
"[",
"'user'",
"]",
".",
"initial",
"=",
"request",
".",
"user",
".",
"id",
"return",
"data"
] | Default user to the current version owner. | [
"Default",
"user",
"to",
"the",
"current",
"version",
"owner",
"."
] | train | https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/reminders/admin.py#L15-L20 |
davidblaisonneau-orange/foreman | foreman/objects.py | ForemanObjects.reload | def reload(self):
""" Function reload
Reload the full object to ensure sync
"""
realData = self.load()
self.clear()
self.update(realData) | python | def reload(self):
""" Function reload
Reload the full object to ensure sync
"""
realData = self.load()
self.clear()
self.update(realData) | [
"def",
"reload",
"(",
"self",
")",
":",
"realData",
"=",
"self",
".",
"load",
"(",
")",
"self",
".",
"clear",
"(",
")",
"self",
".",
"update",
"(",
"realData",
")"
] | Function reload
Reload the full object to ensure sync | [
"Function",
"reload",
"Reload",
"the",
"full",
"object",
"to",
"ensure",
"sync"
] | train | https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/objects.py#L61-L67 |
davidblaisonneau-orange/foreman | foreman/objects.py | ForemanObjects.updateAfterDecorator | def updateAfterDecorator(function):
""" Function updateAfterDecorator
Decorator to ensure local dict is sync with remote foreman
"""
def _updateAfterDecorator(self, *args, **kwargs):
ret = function(self, *args, **kwargs)
self.reload()
return ret
return _updateAfterDecorator | python | def updateAfterDecorator(function):
""" Function updateAfterDecorator
Decorator to ensure local dict is sync with remote foreman
"""
def _updateAfterDecorator(self, *args, **kwargs):
ret = function(self, *args, **kwargs)
self.reload()
return ret
return _updateAfterDecorator | [
"def",
"updateAfterDecorator",
"(",
"function",
")",
":",
"def",
"_updateAfterDecorator",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"ret",
"=",
"function",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"reload",
"(",
")",
"return",
"ret",
"return",
"_updateAfterDecorator"
] | Function updateAfterDecorator
Decorator to ensure local dict is sync with remote foreman | [
"Function",
"updateAfterDecorator",
"Decorator",
"to",
"ensure",
"local",
"dict",
"is",
"sync",
"with",
"remote",
"foreman"
] | train | https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/objects.py#L70-L78 |
davidblaisonneau-orange/foreman | foreman/objects.py | ForemanObjects.updateBeforeDecorator | def updateBeforeDecorator(function):
""" Function updateAfterDecorator
Decorator to ensure local dict is sync with remote foreman
"""
def _updateBeforeDecorator(self, *args, **kwargs):
if self.forceFullSync:
self.reload()
return function(self, *args, **kwargs)
return _updateBeforeDecorator | python | def updateBeforeDecorator(function):
""" Function updateAfterDecorator
Decorator to ensure local dict is sync with remote foreman
"""
def _updateBeforeDecorator(self, *args, **kwargs):
if self.forceFullSync:
self.reload()
return function(self, *args, **kwargs)
return _updateBeforeDecorator | [
"def",
"updateBeforeDecorator",
"(",
"function",
")",
":",
"def",
"_updateBeforeDecorator",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"forceFullSync",
":",
"self",
".",
"reload",
"(",
")",
"return",
"function",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"_updateBeforeDecorator"
] | Function updateBeforeDecorator
Decorator to ensure local dict is sync with remote foreman | [
"Function",
"updateAfterDecorator",
"Decorator",
"to",
"ensure",
"local",
"dict",
"is",
"sync",
"with",
"remote",
"foreman"
] | train | https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/objects.py#L80-L88 |
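Taken together, the two foreman decorators above implement a reload-before / reload-after synchronization pattern. A self-contained toy version (the `Store` class and its counting `reload` are illustrative only):

```python
from functools import wraps

def update_before(func):
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if self.force_full_sync:
            self.reload()           # sync from the remote before reading
        return func(self, *args, **kwargs)
    return wrapper

def update_after(func):
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        result = func(self, *args, **kwargs)
        self.reload()               # sync from the remote after writing
        return result
    return wrapper

class Store(dict):
    force_full_sync = True
    reloads = 0

    def reload(self):
        self.reloads += 1

    @update_before
    def read(self, key):
        return self[key]

    @update_after
    def write(self, key, value):
        self[key] = value

s = Store(a=1)
print(s.read('a'), s.reloads)  # 1 1  (reloaded before the read)
s.write('b', 2)
print(s.reloads)               # 2    (reloaded after the write)
```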
davidblaisonneau-orange/foreman | foreman/objects.py | ForemanObjects.load | def load(self):
""" Function load
Get the list of all objects
@return RETURN: A ForemanItem list
"""
return {x[self.index]: self.itemType(self.api, x['id'],
self.objName, self.payloadObj,
x)
for x in self.api.list(self.objName,
limit=self.searchLimit)} | python | def load(self):
""" Function load
Get the list of all objects
@return RETURN: A ForemanItem list
"""
return {x[self.index]: self.itemType(self.api, x['id'],
self.objName, self.payloadObj,
x)
for x in self.api.list(self.objName,
limit=self.searchLimit)} | [
"def",
"load",
"(",
"self",
")",
":",
"return",
"{",
"x",
"[",
"self",
".",
"index",
"]",
":",
"self",
".",
"itemType",
"(",
"self",
".",
"api",
",",
"x",
"[",
"'id'",
"]",
",",
"self",
".",
"objName",
",",
"self",
".",
"payloadObj",
",",
"x",
")",
"for",
"x",
"in",
"self",
".",
"api",
".",
"list",
"(",
"self",
".",
"objName",
",",
"limit",
"=",
"self",
".",
"searchLimit",
")",
"}"
] | Function load
Get the list of all objects
@return RETURN: A ForemanItem list | [
"Function",
"load",
"Get",
"the",
"list",
"of",
"all",
"objects"
] | train | https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/objects.py#L141-L151 |
davidblaisonneau-orange/foreman | foreman/objects.py | ForemanObjects.checkAndCreate | def checkAndCreate(self, key, payload):
""" Function checkAndCreate
Check if an object exists and create it if not
@param key: The targeted object
@param payload: The targeted object description
@return RETURN: The id of the object
"""
if key not in self:
self[key] = payload
return self[key]['id'] | python | def checkAndCreate(self, key, payload):
""" Function checkAndCreate
Check if an object exists and create it if not
@param key: The targeted object
@param payload: The targeted object description
@return RETURN: The id of the object
"""
if key not in self:
self[key] = payload
return self[key]['id'] | [
"def",
"checkAndCreate",
"(",
"self",
",",
"key",
",",
"payload",
")",
":",
"if",
"key",
"not",
"in",
"self",
":",
"self",
"[",
"key",
"]",
"=",
"payload",
"return",
"self",
"[",
"key",
"]",
"[",
"'id'",
"]"
] | Function checkAndCreate
Check if an object exists and create it if not
@param key: The targeted object
@param payload: The targeted object description
@return RETURN: The id of the object | [
"Function",
"checkAndCreate",
"Check",
"if",
"an",
"object",
"exists",
"and",
"create",
"it",
"if",
"not"
] | train | https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/objects.py#L163-L173 |
qubell/contrib-python-qubell-client | qubell/api/private/__init__.py | operations | def operations():
"""
Class decorator stores all calls into list.
Can be used until .invalidate() is called.
:return: decorated class
"""
def decorator(func):
@wraps(func)
def wrapped_func(*args, **kwargs):
self = args[0]
assert self.__can_use, "User operation queue only in 'with' block"
def defaults_dict():
f_args, varargs, keywords, defaults = inspect.getargspec(func)
defaults = defaults or []
return dict(zip(f_args[-len(defaults)+len(args[1:]):], defaults[len(args[1:]):]))
route_args = dict(defaults_dict().items() + kwargs.items())
func(*args, **kwargs)
self.operations.append((func.__name__, args[1:], route_args, ))
return wrapped_func
def decorate(clazz):
for attr in clazz.__dict__:
if callable(getattr(clazz, attr)):
setattr(clazz, attr, decorator(getattr(clazz, attr)))
def __init__(self): # simple parameter-less constructor
self.operations = []
self.__can_use = True
def invalidate(self):
self.__can_use = False
clazz.__init__ = __init__
clazz.invalidate = invalidate
return clazz
return decorate | python | def operations():
"""
Class decorator stores all calls into list.
Can be used until .invalidate() is called.
:return: decorated class
"""
def decorator(func):
@wraps(func)
def wrapped_func(*args, **kwargs):
self = args[0]
assert self.__can_use, "User operation queue only in 'with' block"
def defaults_dict():
f_args, varargs, keywords, defaults = inspect.getargspec(func)
defaults = defaults or []
return dict(zip(f_args[-len(defaults)+len(args[1:]):], defaults[len(args[1:]):]))
route_args = dict(defaults_dict().items() + kwargs.items())
func(*args, **kwargs)
self.operations.append((func.__name__, args[1:], route_args, ))
return wrapped_func
def decorate(clazz):
for attr in clazz.__dict__:
if callable(getattr(clazz, attr)):
setattr(clazz, attr, decorator(getattr(clazz, attr)))
def __init__(self): # simple parameter-less constructor
self.operations = []
self.__can_use = True
def invalidate(self):
self.__can_use = False
clazz.__init__ = __init__
clazz.invalidate = invalidate
return clazz
return decorate | [
"def",
"operations",
"(",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"wrapped_func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
"=",
"args",
"[",
"0",
"]",
"assert",
"self",
".",
"__can_use",
",",
"\"User operation queue only in 'with' block\"",
"def",
"defaults_dict",
"(",
")",
":",
"f_args",
",",
"varargs",
",",
"keywords",
",",
"defaults",
"=",
"inspect",
".",
"getargspec",
"(",
"func",
")",
"defaults",
"=",
"defaults",
"or",
"[",
"]",
"return",
"dict",
"(",
"zip",
"(",
"f_args",
"[",
"-",
"len",
"(",
"defaults",
")",
"+",
"len",
"(",
"args",
"[",
"1",
":",
"]",
")",
":",
"]",
",",
"defaults",
"[",
"len",
"(",
"args",
"[",
"1",
":",
"]",
")",
":",
"]",
")",
")",
"route_args",
"=",
"dict",
"(",
"defaults_dict",
"(",
")",
".",
"items",
"(",
")",
"+",
"kwargs",
".",
"items",
"(",
")",
")",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"operations",
".",
"append",
"(",
"(",
"func",
".",
"__name__",
",",
"args",
"[",
"1",
":",
"]",
",",
"route_args",
",",
")",
")",
"return",
"wrapped_func",
"def",
"decorate",
"(",
"clazz",
")",
":",
"for",
"attr",
"in",
"clazz",
".",
"__dict__",
":",
"if",
"callable",
"(",
"getattr",
"(",
"clazz",
",",
"attr",
")",
")",
":",
"setattr",
"(",
"clazz",
",",
"attr",
",",
"decorator",
"(",
"getattr",
"(",
"clazz",
",",
"attr",
")",
")",
")",
"def",
"__init__",
"(",
"self",
")",
":",
"# simple parameter-less constructor",
"self",
".",
"operations",
"=",
"[",
"]",
"self",
".",
"__can_use",
"=",
"True",
"def",
"invalidate",
"(",
"self",
")",
":",
"self",
".",
"__can_use",
"=",
"False",
"clazz",
".",
"__init__",
"=",
"__init__",
"clazz",
".",
"invalidate",
"=",
"invalidate",
"return",
"clazz",
"return",
"decorate"
] | Class decorator stores all calls into list.
Can be used until .invalidate() is called.
:return: decorated class | [
"Class",
"decorator",
"stores",
"all",
"calls",
"into",
"list",
".",
"Can",
"be",
"used",
"until",
".",
"invalidate",
"()",
"is",
"called",
".",
":",
"return",
":",
"decorated",
"class"
] | train | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/__init__.py#L39-L76 |
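A simplified, Python 3 analogue of the `operations()` recorder above. The original targets Python 2 (it concatenates the lists returned by `dict.items()` and uses `inspect.getargspec`); this sketch keeps only the core idea, and the `Batch` class is an invented example:

```python
from functools import wraps

def recording(cls):
    """Queue every public method call on self.operations before running it."""
    def record(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            self.operations.append((func.__name__, args, kwargs))
            return func(self, *args, **kwargs)
        return wrapper

    for name, attr in list(vars(cls).items()):
        if callable(attr) and not name.startswith('_'):
            setattr(cls, name, record(attr))

    def __init__(self):  # parameter-less constructor, as in the original
        self.operations = []
    cls.__init__ = __init__
    return cls

@recording
class Batch:
    def launch(self, name, timeout=30):
        pass

b = Batch()
b.launch("db", timeout=10)
print(b.operations)  # [('launch', ('db',), {'timeout': 10})]
```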
developersociety/django-glitter | glitter/publisher/utils.py | process_actions | def process_actions(action_ids=None):
"""
Process actions in the publishing schedule.
Returns the number of actions processed.
"""
actions_taken = 0
action_list = PublishAction.objects.prefetch_related(
'content_object',
).filter(
scheduled_time__lte=timezone.now(),
)
if action_ids is not None:
action_list = action_list.filter(id__in=action_ids)
for action in action_list:
action.process_action()
action.delete()
actions_taken += 1
return actions_taken | python | def process_actions(action_ids=None):
"""
Process actions in the publishing schedule.
Returns the number of actions processed.
"""
actions_taken = 0
action_list = PublishAction.objects.prefetch_related(
'content_object',
).filter(
scheduled_time__lte=timezone.now(),
)
if action_ids is not None:
action_list = action_list.filter(id__in=action_ids)
for action in action_list:
action.process_action()
action.delete()
actions_taken += 1
return actions_taken | [
"def",
"process_actions",
"(",
"action_ids",
"=",
"None",
")",
":",
"actions_taken",
"=",
"0",
"action_list",
"=",
"PublishAction",
".",
"objects",
".",
"prefetch_related",
"(",
"'content_object'",
",",
")",
".",
"filter",
"(",
"scheduled_time__lte",
"=",
"timezone",
".",
"now",
"(",
")",
",",
")",
"if",
"action_ids",
"is",
"not",
"None",
":",
"action_list",
"=",
"action_list",
".",
"filter",
"(",
"id__in",
"=",
"action_ids",
")",
"for",
"action",
"in",
"action_list",
":",
"action",
".",
"process_action",
"(",
")",
"action",
".",
"delete",
"(",
")",
"actions_taken",
"+=",
"1",
"return",
"actions_taken"
] | Process actions in the publishing schedule.
Returns the number of actions processed. | [
"Process",
"actions",
"in",
"the",
"publishing",
"schedule",
"."
] | train | https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/publisher/utils.py#L7-L28 |
developersociety/django-glitter | glitter/publisher/utils.py | celery_enabled | def celery_enabled():
"""
Return a boolean if Celery tasks are enabled for this app.
If the ``GLITTER_PUBLISHER_CELERY`` setting is ``True`` or ``False`` - then that value will be
used. However if the setting isn't defined, then this will be enabled automatically if Celery
is installed.
"""
enabled = getattr(settings, 'GLITTER_PUBLISHER_CELERY', None)
if enabled is None:
try:
import celery # noqa
enabled = True
except ImportError:
enabled = False
return enabled | python | def celery_enabled():
"""
Return a boolean if Celery tasks are enabled for this app.
If the ``GLITTER_PUBLISHER_CELERY`` setting is ``True`` or ``False`` - then that value will be
used. However if the setting isn't defined, then this will be enabled automatically if Celery
is installed.
"""
enabled = getattr(settings, 'GLITTER_PUBLISHER_CELERY', None)
if enabled is None:
try:
import celery # noqa
enabled = True
except ImportError:
enabled = False
return enabled | [
"def",
"celery_enabled",
"(",
")",
":",
"enabled",
"=",
"getattr",
"(",
"settings",
",",
"'GLITTER_PUBLISHER_CELERY'",
",",
"None",
")",
"if",
"enabled",
"is",
"None",
":",
"try",
":",
"import",
"celery",
"# noqa",
"enabled",
"=",
"True",
"except",
"ImportError",
":",
"enabled",
"=",
"False",
"return",
"enabled"
] | Return a boolean if Celery tasks are enabled for this app.
If the ``GLITTER_PUBLISHER_CELERY`` setting is ``True`` or ``False`` - then that value will be
used. However if the setting isn't defined, then this will be enabled automatically if Celery
is installed. | [
"Return",
"a",
"boolean",
"if",
"Celery",
"tasks",
"are",
"enabled",
"for",
"this",
"app",
"."
] | train | https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/publisher/utils.py#L31-L48 |
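The `celery_enabled` pattern, where an explicit setting wins and otherwise the optional dependency is detected, works for any soft dependency. `importlib.util.find_spec` checks importability without actually importing; the function name below is an illustrative stand-in, not django-glitter API:

```python
import importlib.util

def optional_feature(explicit=None, module="celery"):
    if explicit is not None:   # a configured True/False always wins
        return explicit
    # Otherwise: enabled exactly when the dependency is importable.
    return importlib.util.find_spec(module) is not None

print(optional_feature(explicit=False))  # False, regardless of installation
print(optional_feature())                # True iff celery is installed
```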
davidblaisonneau-orange/foreman | foreman/operatingsystems.py | OperatingSystems.checkAndCreate | def checkAndCreate(self, key, payload):
""" Function checkAndCreate
Check if an object exists and create it if not
@param key: The targeted object
@param payload: The targeted object description
@return RETURN: The id of the object
"""
if key not in self:
if 'templates' in payload:
templates = payload.pop('templates')
self[key] = payload
self.reload()
return self[key]['id'] | python | def checkAndCreate(self, key, payload):
""" Function checkAndCreate
Check if an object exists and create it if not
@param key: The targeted object
@param payload: The targeted object description
@return RETURN: The id of the object
"""
if key not in self:
if 'templates' in payload:
templates = payload.pop('templates')
self[key] = payload
self.reload()
return self[key]['id'] | [
"def",
"checkAndCreate",
"(",
"self",
",",
"key",
",",
"payload",
")",
":",
"if",
"key",
"not",
"in",
"self",
":",
"if",
"'templates'",
"in",
"payload",
":",
"templates",
"=",
"payload",
".",
"pop",
"(",
"'templates'",
")",
"self",
"[",
"key",
"]",
"=",
"payload",
"self",
".",
"reload",
"(",
")",
"return",
"self",
"[",
"key",
"]",
"[",
"'id'",
"]"
] | Function checkAndCreate
Check if an object exists and create it if not
@param key: The targeted object
@param payload: The targeted object description
@return RETURN: The id of the object | [
"Function",
"checkAndCreate",
"Check",
"if",
"an",
"object",
"exists",
"and",
"create",
"it",
"if",
"not"
] | train | https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/operatingsystems.py#L51-L64 |
qubell/contrib-python-qubell-client | qubell/api/private/instance.py | Instance.__collect_interfaces_return | def __collect_interfaces_return(interfaces):
"""Collect new style (44.1+) return values to old-style kv-list"""
acc = []
for (interfaceName, interfaceData) in interfaces.items():
signalValues = interfaceData.get("signals", {})
for (signalName, signalValue) in signalValues.items():
pinName = "{0}.{1}".format(interfaceName, signalName)
acc.append({'id': pinName, 'value': signalValue})
return acc | python | def __collect_interfaces_return(interfaces):
"""Collect new style (44.1+) return values to old-style kv-list"""
acc = []
for (interfaceName, interfaceData) in interfaces.items():
signalValues = interfaceData.get("signals", {})
for (signalName, signalValue) in signalValues.items():
pinName = "{0}.{1}".format(interfaceName, signalName)
acc.append({'id': pinName, 'value': signalValue})
return acc | [
"def",
"__collect_interfaces_return",
"(",
"interfaces",
")",
":",
"acc",
"=",
"[",
"]",
"for",
"(",
"interfaceName",
",",
"interfaceData",
")",
"in",
"interfaces",
".",
"items",
"(",
")",
":",
"signalValues",
"=",
"interfaceData",
".",
"get",
"(",
"\"signals\"",
",",
"{",
"}",
")",
"for",
"(",
"signalName",
",",
"signalValue",
")",
"in",
"signalValues",
".",
"items",
"(",
")",
":",
"pinName",
"=",
"\"{0}.{1}\"",
".",
"format",
"(",
"interfaceName",
",",
"signalName",
")",
"acc",
".",
"append",
"(",
"{",
"'id'",
":",
"pinName",
",",
"'value'",
":",
"signalValue",
"}",
")",
"return",
"acc"
] | Collect new style (44.1+) return values to old-style kv-list | [
"Collect",
"new",
"style",
"(",
"44",
".",
"1",
"+",
")",
"return",
"values",
"to",
"old",
"-",
"style",
"kv",
"-",
"list"
] | train | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/instance.py#L113-L121 |
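The interface-flattening logic above, runnable on toy data (the interface and signal names are invented):

```python
interfaces = {
    "endpoints": {"signals": {"url": "http://example.com", "port": 8080}},
    "health": {},  # no "signals" key, so it contributes nothing
}

acc = [{'id': "{0}.{1}".format(iface, sig), 'value': val}
       for iface, data in interfaces.items()
       for sig, val in data.get("signals", {}).items()]

print(sorted(acc, key=lambda kv: kv['id']))
# [{'id': 'endpoints.port', 'value': 8080},
#  {'id': 'endpoints.url', 'value': 'http://example.com'}]
```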
qubell/contrib-python-qubell-client | qubell/api/private/instance.py | Instance.return_values | def return_values(self):
""" Guess what api we are using and return as public api does.
Private has {'id':'key', 'value':'keyvalue'} format, public has {'key':'keyvalue'}
"""
j = self.json()
#TODO: FIXME: get rid of old API when its support will be removed
public_api_value = j.get('returnValues')
old_private_value = j.get('endpoints')
new_private_value = self.__collect_interfaces_return(j.get('interfaces', {}))
retvals = new_private_value or old_private_value or public_api_value or []
# TODO: Public api hack.
if self._router.public_api_in_use:
return retvals
return self.__parse(retvals) | python | def return_values(self):
""" Guess what api we are using and return as public api does.
Private has {'id':'key', 'value':'keyvalue'} format, public has {'key':'keyvalue'}
"""
j = self.json()
#TODO: FIXME: get rid of old API when its support will be removed
public_api_value = j.get('returnValues')
old_private_value = j.get('endpoints')
new_private_value = self.__collect_interfaces_return(j.get('interfaces', {}))
retvals = new_private_value or old_private_value or public_api_value or []
# TODO: Public api hack.
if self._router.public_api_in_use:
return retvals
return self.__parse(retvals) | [
"def",
"return_values",
"(",
"self",
")",
":",
"j",
"=",
"self",
".",
"json",
"(",
")",
"#TODO: FIXME: get rid of old API when its support will be removed",
"public_api_value",
"=",
"j",
".",
"get",
"(",
"'returnValues'",
")",
"old_private_value",
"=",
"j",
".",
"get",
"(",
"'endpoints'",
")",
"new_private_value",
"=",
"self",
".",
"__collect_interfaces_return",
"(",
"j",
".",
"get",
"(",
"'interfaces'",
",",
"{",
"}",
")",
")",
"retvals",
"=",
"new_private_value",
"or",
"old_private_value",
"or",
"public_api_value",
"or",
"[",
"]",
"# TODO: Public api hack.",
"if",
"self",
".",
"_router",
".",
"public_api_in_use",
":",
"return",
"retvals",
"return",
"self",
".",
"__parse",
"(",
"retvals",
")"
] | Guess what api we are using and return as public api does.
Private has {'id':'key', 'value':'keyvalue'} format, public has {'key':'keyvalue'} | [
"Guess",
"what",
"api",
"we",
"are",
"using",
"and",
"return",
"as",
"public",
"api",
"does",
".",
"Private",
"has",
"{",
"id",
":",
"key",
"value",
":",
"keyvalue",
"}",
"format",
"public",
"has",
"{",
"key",
":",
"keyvalue",
"}"
] | train | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/instance.py#L124-L139 |
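The `or` chain in `return_values` simply selects the first non-empty candidate. In isolation, with invented values:

```python
new_private = []                          # empty list is falsy, skipped
old_private = [{'id': 'a', 'value': 1}]   # first truthy candidate wins
public = None

retvals = new_private or old_private or public or []
print(retvals)  # [{'id': 'a', 'value': 1}]
```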
qubell/contrib-python-qubell-client | qubell/api/private/instance.py | Instance.get_activitylog | def get_activitylog(self, after=None, severity=None, start=None, end=None):
"""
Returns activitylog object
severity - filter severity ('INFO', 'DEBUG')
start/end - time or log text
"""
if after:
log_raw = self._router.get_instance_activitylog(org_id=self.organizationId,
instance_id=self.instanceId,
params={"after": after}).json()
else:
log_raw = self._router.get_instance_activitylog(org_id=self.organizationId,
instance_id=self.instanceId).json()
return ActivityLog(log_raw, severity=severity, start=start, end=end) | python | def get_activitylog(self, after=None, severity=None, start=None, end=None):
"""
Returns activitylog object
severity - filter severity ('INFO', 'DEBUG')
start/end - time or log text
"""
if after:
log_raw = self._router.get_instance_activitylog(org_id=self.organizationId,
instance_id=self.instanceId,
params={"after": after}).json()
else:
log_raw = self._router.get_instance_activitylog(org_id=self.organizationId,
instance_id=self.instanceId).json()
return ActivityLog(log_raw, severity=severity, start=start, end=end) | [
"def",
"get_activitylog",
"(",
"self",
",",
"after",
"=",
"None",
",",
"severity",
"=",
"None",
",",
"start",
"=",
"None",
",",
"end",
"=",
"None",
")",
":",
"if",
"after",
":",
"log_raw",
"=",
"self",
".",
"_router",
".",
"get_instance_activitylog",
"(",
"org_id",
"=",
"self",
".",
"organizationId",
",",
"instance_id",
"=",
"self",
".",
"instanceId",
",",
"params",
"=",
"{",
"\"after\"",
":",
"after",
"}",
")",
".",
"json",
"(",
")",
"else",
":",
"log_raw",
"=",
"self",
".",
"_router",
".",
"get_instance_activitylog",
"(",
"org_id",
"=",
"self",
".",
"organizationId",
",",
"instance_id",
"=",
"self",
".",
"instanceId",
")",
".",
"json",
"(",
")",
"return",
"ActivityLog",
"(",
"log_raw",
",",
"severity",
"=",
"severity",
",",
"start",
"=",
"start",
",",
"end",
"=",
"end",
")"
] | Returns activitylog object
severity - filter severity ('INFO', 'DEBUG')
start/end - time or log text | [
"Returns",
"activitylog",
"object",
"severity",
"-",
"filter",
"severity",
"(",
"INFO",
"DEBUG",
")",
"start",
"/",
"end",
"-",
"time",
"or",
"log",
"text"
] | train | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/instance.py#L149-L164 |
qubell/contrib-python-qubell-client | qubell/api/private/instance.py | Instance.json | def json(self):
"""
return __cached_json, if accessed within 300 ms.
This allows to optimize calls when many parameters of entity requires within short time.
"""
if self.fresh():
return self.__cached_json
# noinspection PyAttributeOutsideInit
self.__last_read_time = time.time()
self.__cached_json = self._router.get_instance(org_id=self.organizationId, instance_id=self.instanceId).json()
return self.__cached_json | python | def json(self):
"""
return __cached_json, if accessed within 300 ms.
This allows to optimize calls when many parameters of entity requires within short time.
"""
if self.fresh():
return self.__cached_json
# noinspection PyAttributeOutsideInit
self.__last_read_time = time.time()
self.__cached_json = self._router.get_instance(org_id=self.organizationId, instance_id=self.instanceId).json()
return self.__cached_json | [
"def",
"json",
"(",
"self",
")",
":",
"if",
"self",
".",
"fresh",
"(",
")",
":",
"return",
"self",
".",
"__cached_json",
"# noinspection PyAttributeOutsideInit",
"self",
".",
"__last_read_time",
"=",
"time",
".",
"time",
"(",
")",
"self",
".",
"__cached_json",
"=",
"self",
".",
"_router",
".",
"get_instance",
"(",
"org_id",
"=",
"self",
".",
"organizationId",
",",
"instance_id",
"=",
"self",
".",
"instanceId",
")",
".",
"json",
"(",
")",
"return",
"self",
".",
"__cached_json"
] | return __cached_json, if accessed within 300 ms.
This allows to optimize calls when many parameters of entity requires within short time. | [
"return",
"__cached_json",
"if",
"accessed",
"withing",
"300",
"ms",
".",
"This",
"allows",
"to",
"optimize",
"calls",
"when",
"many",
"parameters",
"of",
"entity",
"requires",
"withing",
"short",
"time",
"."
] | train | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/instance.py#L264-L276 |
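The 300 ms freshness window used by `json()` above, as a stand-alone sketch; the `fetch` callable stands in for the HTTP request made through the router:

```python
import time

class TimedCache:
    TTL = 0.3  # seconds, matching the "300 ms" in the docstring

    def __init__(self, fetch):
        self._fetch = fetch
        self._last_read = 0.0
        self._cached = None

    def get(self):
        if time.time() - self._last_read < self.TTL:
            return self._cached      # still fresh: skip the expensive call
        self._last_read = time.time()
        self._cached = self._fetch()
        return self._cached

calls = []
cache = TimedCache(lambda: calls.append(None) or len(calls))
print(cache.get(), cache.get())  # 1 1  (second call served from cache)
time.sleep(0.31)
print(cache.get())               # 2    (window expired, fetched again)
```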
qubell/contrib-python-qubell-client | qubell/api/private/instance.py | Instance.get_most_recent_update_time | def get_most_recent_update_time(self):
"""
Indicates most recent update of the instance, assumption based on:
- if currentWorkflow exists, its startedAt time is most recent update.
- else max of workflowHistory startedAt is most recent update.
"""
def parse_time(t):
if t:
return time.gmtime(t/1000)
return None
try:
max_wf_started_at = max([i.get('startedAt') for i in self.workflowHistory])
return parse_time(max_wf_started_at)
except ValueError:
return None | python | def get_most_recent_update_time(self):
"""
Indicates most recent update of the instance, assumption based on:
- if currentWorkflow exists, its startedAt time is most recent update.
- else max of workflowHistory startedAt is most recent update.
"""
def parse_time(t):
if t:
return time.gmtime(t/1000)
return None
try:
max_wf_started_at = max([i.get('startedAt') for i in self.workflowHistory])
return parse_time(max_wf_started_at)
except ValueError:
return None | [
"def",
"get_most_recent_update_time",
"(",
"self",
")",
":",
"def",
"parse_time",
"(",
"t",
")",
":",
"if",
"t",
":",
"return",
"time",
".",
"gmtime",
"(",
"t",
"/",
"1000",
")",
"return",
"None",
"try",
":",
"max_wf_started_at",
"=",
"max",
"(",
"[",
"i",
".",
"get",
"(",
"'startedAt'",
")",
"for",
"i",
"in",
"self",
".",
"workflowHistory",
"]",
")",
"return",
"parse_time",
"(",
"max_wf_started_at",
")",
"except",
"ValueError",
":",
"return",
"None"
] | Indicates most recent update of the instance, assumption based on:
- if currentWorkflow exists, its startedAt time is most recent update.
- else max of workflowHistory startedAt is most recent update. | [
"Indicated",
"most",
"recent",
"update",
"of",
"the",
"instance",
"assumption",
"based",
"on",
":",
"-",
"if",
"currentWorkflow",
"exists",
"its",
"startedAt",
"time",
"is",
"most",
"recent",
"update",
".",
"-",
"else",
"max",
"of",
"workflowHistory",
"startedAt",
"is",
"most",
"recent",
"update",
"."
] | train | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/instance.py#L508-L522 |
qubell/contrib-python-qubell-client | qubell/api/private/instance.py | Instance._is_projection_updated_instance | def _is_projection_updated_instance(self):
"""
This method tries to guess if instance was update since last time.
If return True, definitely Yes, if False, this means more unknown
:return: bool
"""
last = self._last_workflow_started_time
if not self._router.public_api_in_use:
most_recent = self.get_most_recent_update_time()
else:
most_recent = None
if last and most_recent:
return last < most_recent
return False | python | def _is_projection_updated_instance(self):
"""
This method tries to guess if instance was update since last time.
If return True, definitely Yes, if False, this means more unknown
:return: bool
"""
last = self._last_workflow_started_time
if not self._router.public_api_in_use:
most_recent = self.get_most_recent_update_time()
else:
most_recent = None
if last and most_recent:
return last < most_recent
return False | [
"def",
"_is_projection_updated_instance",
"(",
"self",
")",
":",
"last",
"=",
"self",
".",
"_last_workflow_started_time",
"if",
"not",
"self",
".",
"_router",
".",
"public_api_in_use",
":",
"most_recent",
"=",
"self",
".",
"get_most_recent_update_time",
"(",
")",
"else",
":",
"most_recent",
"=",
"None",
"if",
"last",
"and",
"most_recent",
":",
"return",
"last",
"<",
"most_recent",
"return",
"False"
] | This method tries to guess if instance was update since last time.
If return True, definitely Yes, if False, this means more unknown
:return: bool | [
"This",
"method",
"tries",
"to",
"guess",
"if",
"instance",
"was",
"update",
"since",
"last",
"time",
".",
"If",
"return",
"True",
"definitely",
"Yes",
"if",
"False",
"this",
"means",
"more",
"unknown",
":",
"return",
":",
"bool"
] | train | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/instance.py#L524-L537 |
qubell/contrib-python-qubell-client | qubell/api/private/instance.py | ActivityLog.find | def find(self, item, description='', event_type=''):
"""
Find regexp in activitylog
find record as if type are in description.
"""
# TODO: should be refactored, dumb logic
if ': ' in item:
splited = item.split(': ', 1)
if splited[0] in self.TYPES:
description = item.split(': ')[1]
event_type = item.split(': ')[0]
else:
description = item
else:
if not description:
description = item
if event_type:
found = [x['time'] for x in self.log if re.search(description, x['description'])
and x['eventTypeText'] == event_type]
else:
found = [x['time'] for x in self.log if re.search(description, x['description'])]
if len(found):
return found
raise exceptions.NotFoundError("Item '{}' is not found with (description='{}', event_type='{}')".
format(item, description, event_type)) | python | def find(self, item, description='', event_type=''):
"""
Find regexp in activitylog
find record as if type are in description.
"""
# TODO: should be refactored, dumb logic
if ': ' in item:
splited = item.split(': ', 1)
if splited[0] in self.TYPES:
description = item.split(': ')[1]
event_type = item.split(': ')[0]
else:
description = item
else:
if not description:
description = item
if event_type:
found = [x['time'] for x in self.log if re.search(description, x['description'])
and x['eventTypeText'] == event_type]
else:
found = [x['time'] for x in self.log if re.search(description, x['description'])]
if len(found):
return found
raise exceptions.NotFoundError("Item '{}' is not found with (description='{}', event_type='{}')".
format(item, description, event_type)) | [
"def",
"find",
"(",
"self",
",",
"item",
",",
"description",
"=",
"''",
",",
"event_type",
"=",
"''",
")",
":",
"# TODO: should be refactored, dumb logic",
"if",
"': '",
"in",
"item",
":",
"splited",
"=",
"item",
".",
"split",
"(",
"': '",
",",
"1",
")",
"if",
"splited",
"[",
"0",
"]",
"in",
"self",
".",
"TYPES",
":",
"description",
"=",
"item",
".",
"split",
"(",
"': '",
")",
"[",
"1",
"]",
"event_type",
"=",
"item",
".",
"split",
"(",
"': '",
")",
"[",
"0",
"]",
"else",
":",
"description",
"=",
"item",
"else",
":",
"if",
"not",
"description",
":",
"description",
"=",
"item",
"if",
"event_type",
":",
"found",
"=",
"[",
"x",
"[",
"'time'",
"]",
"for",
"x",
"in",
"self",
".",
"log",
"if",
"re",
".",
"search",
"(",
"description",
",",
"x",
"[",
"'description'",
"]",
")",
"and",
"x",
"[",
"'eventTypeText'",
"]",
"==",
"event_type",
"]",
"else",
":",
"found",
"=",
"[",
"x",
"[",
"'time'",
"]",
"for",
"x",
"in",
"self",
".",
"log",
"if",
"re",
".",
"search",
"(",
"description",
",",
"x",
"[",
"'description'",
"]",
")",
"]",
"if",
"len",
"(",
"found",
")",
":",
"return",
"found",
"raise",
"exceptions",
".",
"NotFoundError",
"(",
"\"Item '{}' is not found with (description='{}', event_type='{}')\"",
".",
"format",
"(",
"item",
",",
"description",
",",
"event_type",
")",
")"
] | Find regexp in activitylog
find record as if type are in description. | [
"Find",
"regexp",
"in",
"activitylog",
"find",
"record",
"as",
"if",
"type",
"are",
"in",
"description",
"."
] | train | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/instance.py#L621-L647 |
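The core lookup of `ActivityLog.find` reduced to its essentials, on invented records (the field names match the snippet above):

```python
import re

log = [
    {'time': 10, 'description': 'workflow launch started',
     'eventTypeText': 'status updated'},
    {'time': 20, 'description': 'signals updated',
     'eventTypeText': 'dynamic links updated'},
]

def find_times(records, pattern, event_type=None):
    return [r['time'] for r in records
            if re.search(pattern, r['description'])
            and (event_type is None or r['eventTypeText'] == event_type)]

print(find_times(log, r'launch'))                            # [10]
print(find_times(log, r'updated', 'dynamic links updated'))  # [20]
```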
jamescooke/flake8-aaa | src/flake8_aaa/command_line.py | do_command_line | def do_command_line(infile: typing.IO[str]) -> int:
"""
Currently a small stub to create an instance of Checker for the passed
``infile`` and run its test functions through linting.
Args:
infile
Returns:
int: Number of flake8 errors raised.
"""
lines = infile.readlines()
tree = ast.parse(''.join(lines))
checker = Checker(tree, lines, infile.name)
checker.load()
errors = [] # type: typing.List[AAAError]
for func in checker.all_funcs(skip_noqa=True):
try:
errors = list(func.check_all())
except ValidationError as error:
errors = [error.to_aaa()]
print(func.__str__(errors), end='')
return len(errors) | python | def do_command_line(infile: typing.IO[str]) -> int:
"""
Currently a small stub to create an instance of Checker for the passed
``infile`` and run its test functions through linting.
Args:
infile
Returns:
int: Number of flake8 errors raised.
"""
lines = infile.readlines()
tree = ast.parse(''.join(lines))
checker = Checker(tree, lines, infile.name)
checker.load()
errors = [] # type: typing.List[AAAError]
for func in checker.all_funcs(skip_noqa=True):
try:
errors = list(func.check_all())
except ValidationError as error:
errors = [error.to_aaa()]
print(func.__str__(errors), end='')
return len(errors) | [
"def",
"do_command_line",
"(",
"infile",
":",
"typing",
".",
"IO",
"[",
"str",
"]",
")",
"->",
"int",
":",
"lines",
"=",
"infile",
".",
"readlines",
"(",
")",
"tree",
"=",
"ast",
".",
"parse",
"(",
"''",
".",
"join",
"(",
"lines",
")",
")",
"checker",
"=",
"Checker",
"(",
"tree",
",",
"lines",
",",
"infile",
".",
"name",
")",
"checker",
".",
"load",
"(",
")",
"errors",
"=",
"[",
"]",
"# type: typing.List[AAAError]",
"for",
"func",
"in",
"checker",
".",
"all_funcs",
"(",
"skip_noqa",
"=",
"True",
")",
":",
"try",
":",
"errors",
"=",
"list",
"(",
"func",
".",
"check_all",
"(",
")",
")",
"except",
"ValidationError",
"as",
"error",
":",
"errors",
"=",
"[",
"error",
".",
"to_aaa",
"(",
")",
"]",
"print",
"(",
"func",
".",
"__str__",
"(",
"errors",
")",
",",
"end",
"=",
"''",
")",
"return",
"len",
"(",
"errors",
")"
] | Currently a small stub to create an instance of Checker for the passed
``infile`` and run its test functions through linting.
Args:
infile
Returns:
int: Number of flake8 errors raised. | [
"Currently",
"a",
"small",
"stub",
"to",
"create",
"an",
"instance",
"of",
"Checker",
"for",
"the",
"passed",
"infile",
"and",
"run",
"its",
"test",
"functions",
"through",
"linting",
"."
] | train | https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/command_line.py#L8-L30 |
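The parsing step used by `do_command_line` in isolation: `ast.parse` over the raw source, then a walk to pick out test functions. The `Checker` class itself is flake8-aaa internals and is not reproduced here:

```python
import ast

source = (
    "def test_one():\n"
    "    pass\n"
    "\n"
    "def helper():\n"
    "    pass\n"
)
tree = ast.parse(source)
tests = [node.name for node in ast.walk(tree)
         if isinstance(node, ast.FunctionDef) and node.name.startswith("test")]
print(tests)  # ['test_one']
```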
aroberge/experimental | experimental/core/import_hook.py | MyMetaFinder.find_spec | def find_spec(self, fullname, path, target=None):
'''finds the appropriate properties (spec) of a module, and sets
its loader.'''
if not path:
path = [os.getcwd()]
if "." in fullname:
name = fullname.split(".")[-1]
else:
name = fullname
for entry in path:
if os.path.isdir(os.path.join(entry, name)):
# this module has child modules
filename = os.path.join(entry, name, "__init__.py")
submodule_locations = [os.path.join(entry, name)]
else:
filename = os.path.join(entry, name + ".py")
submodule_locations = None
if not os.path.exists(filename):
continue
return spec_from_file_location(fullname, filename,
loader=MyLoader(filename),
submodule_search_locations=submodule_locations)
return None | python | def find_spec(self, fullname, path, target=None):
'''finds the appropriate properties (spec) of a module, and sets
its loader.'''
if not path:
path = [os.getcwd()]
if "." in fullname:
name = fullname.split(".")[-1]
else:
name = fullname
for entry in path:
if os.path.isdir(os.path.join(entry, name)):
# this module has child modules
filename = os.path.join(entry, name, "__init__.py")
submodule_locations = [os.path.join(entry, name)]
else:
filename = os.path.join(entry, name + ".py")
submodule_locations = None
if not os.path.exists(filename):
continue
return spec_from_file_location(fullname, filename,
loader=MyLoader(filename),
submodule_search_locations=submodule_locations)
return None | [
"def",
"find_spec",
"(",
"self",
",",
"fullname",
",",
"path",
",",
"target",
"=",
"None",
")",
":",
"if",
"not",
"path",
":",
"path",
"=",
"[",
"os",
".",
"getcwd",
"(",
")",
"]",
"if",
"\".\"",
"in",
"fullname",
":",
"name",
"=",
"fullname",
".",
"split",
"(",
"\".\"",
")",
"[",
"-",
"1",
"]",
"else",
":",
"name",
"=",
"fullname",
"for",
"entry",
"in",
"path",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"entry",
",",
"name",
")",
")",
":",
"# this module has child modules",
"filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"entry",
",",
"name",
",",
"\"__init__.py\"",
")",
"submodule_locations",
"=",
"[",
"os",
".",
"path",
".",
"join",
"(",
"entry",
",",
"name",
")",
"]",
"else",
":",
"filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"entry",
",",
"name",
"+",
"\".py\"",
")",
"submodule_locations",
"=",
"None",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"continue",
"return",
"spec_from_file_location",
"(",
"fullname",
",",
"filename",
",",
"loader",
"=",
"MyLoader",
"(",
"filename",
")",
",",
"submodule_search_locations",
"=",
"submodule_locations",
")",
"return",
"None"
] | finds the appropriate properties (spec) of a module, and sets
its loader. | [
"finds",
"the",
"appropriate",
"properties",
"(",
"spec",
")",
"of",
"a",
"module",
"and",
"sets",
"its",
"loader",
"."
] | train | https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/core/import_hook.py#L47-L70 |
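In the import system, an instance of a finder like `MyMetaFinder` would be prepended to `sys.meta_path`; once `find_spec` returns a spec, the machinery builds the module from it. That last step can be reproduced stand-alone (default source loader here, where `MyLoader` would transform the source first):

```python
import os
import tempfile
from importlib.util import spec_from_file_location, module_from_spec

tmpdir = tempfile.mkdtemp()
path = os.path.join(tmpdir, "demo.py")
with open(path, "w") as f:
    f.write("VALUE = 42\n")

spec = spec_from_file_location("demo", path)  # same helper as in find_spec above
module = module_from_spec(spec)
spec.loader.exec_module(module)               # executes demo.py into the module
print(module.VALUE)                           # 42
```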
aroberge/experimental | experimental/core/import_hook.py | MyLoader.exec_module | def exec_module(self, module):
'''import the source code, transform it before executing it so that
it is known to Python.'''
global MAIN_MODULE_NAME
if module.__name__ == MAIN_MODULE_NAME:
module.__name__ = "__main__"
MAIN_MODULE_NAME = None
with open(self.filename) as f:
source = f.read()
if transforms.transformers:
source = transforms.transform(source)
else:
for line in source.split('\n'):
if transforms.FROM_EXPERIMENTAL.match(line):
## transforms.transform will extract all such relevant
## lines and add them all relevant transformers
source = transforms.transform(source)
break
exec(source, vars(module)) | python | def exec_module(self, module):
'''import the source code, transform it before executing it so that
it is known to Python.'''
global MAIN_MODULE_NAME
if module.__name__ == MAIN_MODULE_NAME:
module.__name__ = "__main__"
MAIN_MODULE_NAME = None
with open(self.filename) as f:
source = f.read()
if transforms.transformers:
source = transforms.transform(source)
else:
for line in source.split('\n'):
if transforms.FROM_EXPERIMENTAL.match(line):
## transforms.transform will extract all such relevant
## lines and add them all relevant transformers
source = transforms.transform(source)
break
exec(source, vars(module)) | [
"def",
"exec_module",
"(",
"self",
",",
"module",
")",
":",
"global",
"MAIN_MODULE_NAME",
"if",
"module",
".",
"__name__",
"==",
"MAIN_MODULE_NAME",
":",
"module",
".",
"__name__",
"=",
"\"__main__\"",
"MAIN_MODULE_NAME",
"=",
"None",
"with",
"open",
"(",
"self",
".",
"filename",
")",
"as",
"f",
":",
"source",
"=",
"f",
".",
"read",
"(",
")",
"if",
"transforms",
".",
"transformers",
":",
"source",
"=",
"transforms",
".",
"transform",
"(",
"source",
")",
"else",
":",
"for",
"line",
"in",
"source",
".",
"split",
"(",
"'\\n'",
")",
":",
"if",
"transforms",
".",
"FROM_EXPERIMENTAL",
".",
"match",
"(",
"line",
")",
":",
"## transforms.transform will extract all such relevant",
"## lines and add them all relevant transformers",
"source",
"=",
"transforms",
".",
"transform",
"(",
"source",
")",
"break",
"exec",
"(",
"source",
",",
"vars",
"(",
"module",
")",
")"
] | import the source code, transform it before executing it so that
it is known to Python. | [
"import",
"the",
"source",
"code",
"transforma",
"it",
"before",
"executing",
"it",
"so",
"that",
"it",
"is",
"known",
"to",
"Python",
"."
] | train | https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/core/import_hook.py#L83-L103 |
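The final `exec(source, vars(module))` step in isolation: executing (possibly transformed) source into a module's namespace. The module here is created directly with `types.ModuleType` instead of by the import machinery:

```python
import types

module = types.ModuleType("demo")
source = "GREETING = 'hello'\n"
exec(compile(source, "<demo>", "exec"), vars(module))
print(module.GREETING)  # hello
```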
astroduff/commah | commah/commah.py | _izip | def _izip(*iterables):
""" Iterate through multiple lists or arrays of equal size """
# This izip routine is from itertools
# izip('ABCD', 'xy') --> Ax By
iterators = map(iter, iterables)
while iterators:
yield tuple(map(next, iterators)) | python | def _izip(*iterables):
""" Iterate through multiple lists or arrays of equal size """
# This izip routine is from itertools
# izip('ABCD', 'xy') --> Ax By
iterators = map(iter, iterables)
while iterators:
yield tuple(map(next, iterators)) | [
"def",
"_izip",
"(",
"*",
"iterables",
")",
":",
"# This izip routine is from itertools",
"# izip('ABCD', 'xy') --> Ax By",
"iterators",
"=",
"map",
"(",
"iter",
",",
"iterables",
")",
"while",
"iterators",
":",
"yield",
"tuple",
"(",
"map",
"(",
"next",
",",
"iterators",
")",
")"
] | Iterate through multiple lists or arrays of equal size | [
"Iterate",
"through",
"multiple",
"lists",
"or",
"arrays",
"of",
"equal",
"size"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L17-L24 |
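One caveat: this is the Python 2 itertools `izip` recipe. Under Python 3, `map()` is lazy, so `iterators` is consumed on the first pass and the `while` loop then yields empty tuples indefinitely; the built-in `zip()` already provides the intended pairing there. An explicit Python 3 version of the same behaviour:

```python
def izip(*iterables):
    iterators = [iter(it) for it in iterables]  # materialise the iterators once
    while iterators:
        values = []
        for it in iterators:
            try:
                values.append(next(it))
            except StopIteration:
                return                           # stop at the shortest input
        yield tuple(values)

print(list(izip('ABCD', 'xy')))  # [('A', 'x'), ('B', 'y')]
```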
astroduff/commah | commah/commah.py | _checkinput | def _checkinput(zi, Mi, z=False, verbose=None):
""" Check and convert any input scalar or array to numpy array """
# How many halo redshifts provided?
zi = np.array(zi, ndmin=1, dtype=float)
# How many halo masses provided?
Mi = np.array(Mi, ndmin=1, dtype=float)
# Check the input sizes for zi and Mi make sense, if not then exit unless
# one axis is length one, then replicate values to the size of the other
if (zi.size > 1) and (Mi.size > 1):
if(zi.size != Mi.size):
print("Error ambiguous request")
print("Need individual redshifts for all haloes provided ")
print("Or have all haloes at same redshift ")
return(-1)
elif (zi.size == 1) and (Mi.size > 1):
if verbose:
print("Assume zi is the same for all Mi halo masses provided")
# Replicate redshift for all halo masses
zi = np.ones_like(Mi)*zi[0]
elif (Mi.size == 1) and (zi.size > 1):
if verbose:
print("Assume Mi halo masses are the same for all zi provided")
# Replicate redshift for all halo masses
Mi = np.ones_like(zi)*Mi[0]
else:
if verbose:
print("A single Mi and zi provided")
# Very simple test for size / type of incoming array
# just in case numpy / list given
if z is False:
# Didn't pass anything, set zi = z
lenzout = 1
else:
# If something was passed, convert to 1D NumPy array
z = np.array(z, ndmin=1, dtype=float)
lenzout = z.size
return(zi, Mi, z, zi.size, Mi.size, lenzout) | python | def _checkinput(zi, Mi, z=False, verbose=None):
""" Check and convert any input scalar or array to numpy array """
# How many halo redshifts provided?
zi = np.array(zi, ndmin=1, dtype=float)
# How many halo masses provided?
Mi = np.array(Mi, ndmin=1, dtype=float)
# Check the input sizes for zi and Mi make sense, if not then exit unless
# one axis is length one, then replicate values to the size of the other
if (zi.size > 1) and (Mi.size > 1):
if(zi.size != Mi.size):
print("Error ambiguous request")
print("Need individual redshifts for all haloes provided ")
print("Or have all haloes at same redshift ")
return(-1)
elif (zi.size == 1) and (Mi.size > 1):
if verbose:
print("Assume zi is the same for all Mi halo masses provided")
# Replicate redshift for all halo masses
zi = np.ones_like(Mi)*zi[0]
elif (Mi.size == 1) and (zi.size > 1):
if verbose:
print("Assume Mi halo masses are the same for all zi provided")
# Replicate redshift for all halo masses
Mi = np.ones_like(zi)*Mi[0]
else:
if verbose:
print("A single Mi and zi provided")
# Very simple test for size / type of incoming array
# just in case numpy / list given
if z is False:
# Didn't pass anything, set zi = z
lenzout = 1
else:
# If something was passed, convert to 1D NumPy array
z = np.array(z, ndmin=1, dtype=float)
lenzout = z.size
return(zi, Mi, z, zi.size, Mi.size, lenzout) | [
"def",
"_checkinput",
"(",
"zi",
",",
"Mi",
",",
"z",
"=",
"False",
",",
"verbose",
"=",
"None",
")",
":",
"# How many halo redshifts provided?",
"zi",
"=",
"np",
".",
"array",
"(",
"zi",
",",
"ndmin",
"=",
"1",
",",
"dtype",
"=",
"float",
")",
"# How many halo masses provided?",
"Mi",
"=",
"np",
".",
"array",
"(",
"Mi",
",",
"ndmin",
"=",
"1",
",",
"dtype",
"=",
"float",
")",
"# Check the input sizes for zi and Mi make sense, if not then exit unless",
"# one axis is length one, then replicate values to the size of the other",
"if",
"(",
"zi",
".",
"size",
">",
"1",
")",
"and",
"(",
"Mi",
".",
"size",
">",
"1",
")",
":",
"if",
"(",
"zi",
".",
"size",
"!=",
"Mi",
".",
"size",
")",
":",
"print",
"(",
"\"Error ambiguous request\"",
")",
"print",
"(",
"\"Need individual redshifts for all haloes provided \"",
")",
"print",
"(",
"\"Or have all haloes at same redshift \"",
")",
"return",
"(",
"-",
"1",
")",
"elif",
"(",
"zi",
".",
"size",
"==",
"1",
")",
"and",
"(",
"Mi",
".",
"size",
">",
"1",
")",
":",
"if",
"verbose",
":",
"print",
"(",
"\"Assume zi is the same for all Mi halo masses provided\"",
")",
"# Replicate redshift for all halo masses",
"zi",
"=",
"np",
".",
"ones_like",
"(",
"Mi",
")",
"*",
"zi",
"[",
"0",
"]",
"elif",
"(",
"Mi",
".",
"size",
"==",
"1",
")",
"and",
"(",
"zi",
".",
"size",
">",
"1",
")",
":",
"if",
"verbose",
":",
"print",
"(",
"\"Assume Mi halo masses are the same for all zi provided\"",
")",
"# Replicate redshift for all halo masses",
"Mi",
"=",
"np",
".",
"ones_like",
"(",
"zi",
")",
"*",
"Mi",
"[",
"0",
"]",
"else",
":",
"if",
"verbose",
":",
"print",
"(",
"\"A single Mi and zi provided\"",
")",
"# Very simple test for size / type of incoming array",
"# just in case numpy / list given",
"if",
"z",
"is",
"False",
":",
"# Didn't pass anything, set zi = z",
"lenzout",
"=",
"1",
"else",
":",
"# If something was passed, convert to 1D NumPy array",
"z",
"=",
"np",
".",
"array",
"(",
"z",
",",
"ndmin",
"=",
"1",
",",
"dtype",
"=",
"float",
")",
"lenzout",
"=",
"z",
".",
"size",
"return",
"(",
"zi",
",",
"Mi",
",",
"z",
",",
"zi",
".",
"size",
",",
"Mi",
".",
"size",
",",
"lenzout",
")"
] | Check and convert any input scalar or array to numpy array | [
"Check",
"and",
"convert",
"any",
"input",
"scalar",
"or",
"array",
"to",
"numpy",
"array"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L27-L67 |
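A minimal sketch of the broadcasting rule _checkinput applies, in plain NumPy (values illustrative):

import numpy as np

zi = np.array(0.5, ndmin=1, dtype=float)        # single starting redshift
Mi = np.array([1e11, 1e12, 1e13], dtype=float)  # three halo masses
if (zi.size == 1) and (Mi.size > 1):
    zi = np.ones_like(Mi) * zi[0]               # replicate zi for every Mi
print(zi)  # [0.5 0.5 0.5]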
astroduff/commah | commah/commah.py | getcosmo | def getcosmo(cosmology):
""" Find cosmological parameters for named cosmo in cosmology.py list """
defaultcosmologies = {'dragons': cg.DRAGONS(), 'wmap1': cg.WMAP1_Mill(),
'wmap3': cg.WMAP3_ML(), 'wmap5': cg.WMAP5_mean(),
'wmap7': cg.WMAP7_ML(), 'wmap9': cg.WMAP9_ML(),
'wmap1_lss': cg.WMAP1_2dF_mean(),
'wmap3_mean': cg.WMAP3_mean(),
'wmap5_ml': cg.WMAP5_ML(),
'wmap5_lss': cg.WMAP5_BAO_SN_mean(),
'wmap7_lss': cg.WMAP7_BAO_H0_mean(),
'planck13': cg.Planck_2013(),
'planck15': cg.Planck_2015()}
if isinstance(cosmology, dict):
# User providing their own variables
cosmo = cosmology
if 'A_scaling' not in cosmology.keys():
A_scaling = getAscaling(cosmology, newcosmo=True)
cosmo.update({'A_scaling': A_scaling})
# Add extra variables by hand that cosmolopy requires
# note that they aren't used (set to zero)
for paramnames in cg.WMAP5_mean().keys():
if paramnames not in cosmology.keys():
cosmo.update({paramnames: 0})
elif cosmology.lower() in defaultcosmologies.keys():
# Load by name of cosmology instead
cosmo = defaultcosmologies[cosmology.lower()]
A_scaling = getAscaling(cosmology)
cosmo.update({'A_scaling': A_scaling})
else:
print("You haven't passed a dict of cosmological parameters ")
print("OR a recognised cosmology, you gave %s" % (cosmology))
# No idea why this has to be done by hand but should be O_k = 0
cosmo = cp.distance.set_omega_k_0(cosmo)
# Use the cosmology as **cosmo passed to cosmolopy routines
return(cosmo) | python | def getcosmo(cosmology):
""" Find cosmological parameters for named cosmo in cosmology.py list """
defaultcosmologies = {'dragons': cg.DRAGONS(), 'wmap1': cg.WMAP1_Mill(),
'wmap3': cg.WMAP3_ML(), 'wmap5': cg.WMAP5_mean(),
'wmap7': cg.WMAP7_ML(), 'wmap9': cg.WMAP9_ML(),
'wmap1_lss': cg.WMAP1_2dF_mean(),
'wmap3_mean': cg.WMAP3_mean(),
'wmap5_ml': cg.WMAP5_ML(),
'wmap5_lss': cg.WMAP5_BAO_SN_mean(),
'wmap7_lss': cg.WMAP7_BAO_H0_mean(),
'planck13': cg.Planck_2013(),
'planck15': cg.Planck_2015()}
if isinstance(cosmology, dict):
# User providing their own variables
cosmo = cosmology
if 'A_scaling' not in cosmology.keys():
A_scaling = getAscaling(cosmology, newcosmo=True)
cosmo.update({'A_scaling': A_scaling})
# Add extra variables by hand that cosmolopy requires
# note that they aren't used (set to zero)
for paramnames in cg.WMAP5_mean().keys():
if paramnames not in cosmology.keys():
cosmo.update({paramnames: 0})
elif cosmology.lower() in defaultcosmologies.keys():
# Load by name of cosmology instead
cosmo = defaultcosmologies[cosmology.lower()]
A_scaling = getAscaling(cosmology)
cosmo.update({'A_scaling': A_scaling})
else:
print("You haven't passed a dict of cosmological parameters ")
print("OR a recognised cosmology, you gave %s" % (cosmology))
# No idea why this has to be done by hand but should be O_k = 0
cosmo = cp.distance.set_omega_k_0(cosmo)
# Use the cosmology as **cosmo passed to cosmolopy routines
return(cosmo) | [
"def",
"getcosmo",
"(",
"cosmology",
")",
":",
"defaultcosmologies",
"=",
"{",
"'dragons'",
":",
"cg",
".",
"DRAGONS",
"(",
")",
",",
"'wmap1'",
":",
"cg",
".",
"WMAP1_Mill",
"(",
")",
",",
"'wmap3'",
":",
"cg",
".",
"WMAP3_ML",
"(",
")",
",",
"'wmap5'",
":",
"cg",
".",
"WMAP5_mean",
"(",
")",
",",
"'wmap7'",
":",
"cg",
".",
"WMAP7_ML",
"(",
")",
",",
"'wmap9'",
":",
"cg",
".",
"WMAP9_ML",
"(",
")",
",",
"'wmap1_lss'",
":",
"cg",
".",
"WMAP1_2dF_mean",
"(",
")",
",",
"'wmap3_mean'",
":",
"cg",
".",
"WMAP3_mean",
"(",
")",
",",
"'wmap5_ml'",
":",
"cg",
".",
"WMAP5_ML",
"(",
")",
",",
"'wmap5_lss'",
":",
"cg",
".",
"WMAP5_BAO_SN_mean",
"(",
")",
",",
"'wmap7_lss'",
":",
"cg",
".",
"WMAP7_BAO_H0_mean",
"(",
")",
",",
"'planck13'",
":",
"cg",
".",
"Planck_2013",
"(",
")",
",",
"'planck15'",
":",
"cg",
".",
"Planck_2015",
"(",
")",
"}",
"if",
"isinstance",
"(",
"cosmology",
",",
"dict",
")",
":",
"# User providing their own variables",
"cosmo",
"=",
"cosmology",
"if",
"'A_scaling'",
"not",
"in",
"cosmology",
".",
"keys",
"(",
")",
":",
"A_scaling",
"=",
"getAscaling",
"(",
"cosmology",
",",
"newcosmo",
"=",
"True",
")",
"cosmo",
".",
"update",
"(",
"{",
"'A_scaling'",
":",
"A_scaling",
"}",
")",
"# Add extra variables by hand that cosmolopy requires",
"# note that they aren't used (set to zero)",
"for",
"paramnames",
"in",
"cg",
".",
"WMAP5_mean",
"(",
")",
".",
"keys",
"(",
")",
":",
"if",
"paramnames",
"not",
"in",
"cosmology",
".",
"keys",
"(",
")",
":",
"cosmo",
".",
"update",
"(",
"{",
"paramnames",
":",
"0",
"}",
")",
"elif",
"cosmology",
".",
"lower",
"(",
")",
"in",
"defaultcosmologies",
".",
"keys",
"(",
")",
":",
"# Load by name of cosmology instead",
"cosmo",
"=",
"defaultcosmologies",
"[",
"cosmology",
".",
"lower",
"(",
")",
"]",
"A_scaling",
"=",
"getAscaling",
"(",
"cosmology",
")",
"cosmo",
".",
"update",
"(",
"{",
"'A_scaling'",
":",
"A_scaling",
"}",
")",
"else",
":",
"print",
"(",
"\"You haven't passed a dict of cosmological parameters \"",
")",
"print",
"(",
"\"OR a recognised cosmology, you gave %s\"",
"%",
"(",
"cosmology",
")",
")",
"# No idea why this has to be done by hand but should be O_k = 0",
"cosmo",
"=",
"cp",
".",
"distance",
".",
"set_omega_k_0",
"(",
"cosmo",
")",
"# Use the cosmology as **cosmo passed to cosmolopy routines",
"return",
"(",
"cosmo",
")"
] | Find cosmological parameters for named cosmo in cosmology.py list | [
"Find",
"cosmological",
"parameters",
"for",
"named",
"cosmo",
"in",
"cosmology",
".",
"py",
"list"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L70-L108 |
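For reference, a minimal user-supplied dictionary that the dict branch of getcosmo accepts (values illustrative; the remaining cosmolopy keys are zero-filled automatically and 'A_scaling' is derived via getAscaling):

my_cosmo = {'omega_M_0': 0.272, 'omega_lambda_0': 0.728,
            'omega_b_0': 0.045, 'h': 0.70, 'sigma_8': 0.81, 'n': 0.967}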
astroduff/commah | commah/commah.py | _getcosmoheader | def _getcosmoheader(cosmo):
""" Output the cosmology to a string for writing to file """
cosmoheader = ("# Cosmology (flat) Om:{0:.3f}, Ol:{1:.3f}, h:{2:.2f}, "
"sigma8:{3:.3f}, ns:{4:.2f}".format(
cosmo['omega_M_0'], cosmo['omega_lambda_0'], cosmo['h'],
cosmo['sigma_8'], cosmo['n']))
return(cosmoheader) | python | def _getcosmoheader(cosmo):
""" Output the cosmology to a string for writing to file """
cosmoheader = ("# Cosmology (flat) Om:{0:.3f}, Ol:{1:.3f}, h:{2:.2f}, "
"sigma8:{3:.3f}, ns:{4:.2f}".format(
cosmo['omega_M_0'], cosmo['omega_lambda_0'], cosmo['h'],
cosmo['sigma_8'], cosmo['n']))
return(cosmoheader) | [
"def",
"_getcosmoheader",
"(",
"cosmo",
")",
":",
"cosmoheader",
"=",
"(",
"\"# Cosmology (flat) Om:{0:.3f}, Ol:{1:.3f}, h:{2:.2f}, \"",
"\"sigma8:{3:.3f}, ns:{4:.2f}\"",
".",
"format",
"(",
"cosmo",
"[",
"'omega_M_0'",
"]",
",",
"cosmo",
"[",
"'omega_lambda_0'",
"]",
",",
"cosmo",
"[",
"'h'",
"]",
",",
"cosmo",
"[",
"'sigma_8'",
"]",
",",
"cosmo",
"[",
"'n'",
"]",
")",
")",
"return",
"(",
"cosmoheader",
")"
] | Output the cosmology to a string for writing to file | [
"Output",
"the",
"cosmology",
"to",
"a",
"string",
"for",
"writing",
"to",
"file"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L111-L119 |
astroduff/commah | commah/commah.py | cduffy | def cduffy(z, M, vir='200crit', relaxed=True):
""" NFW conc from Duffy 08 Table 1 for halo mass and redshift"""
if(vir == '200crit'):
if relaxed:
params = [6.71, -0.091, -0.44]
else:
params = [5.71, -0.084, -0.47]
elif(vir == 'tophat'):
if relaxed:
params = [9.23, -0.090, -0.69]
else:
params = [7.85, -0.081, -0.71]
elif(vir == '200mean'):
if relaxed:
params = [11.93, -0.090, -0.99]
else:
params = [10.14, -0.081, -1.01]
else:
print("Didn't recognise the halo boundary definition provided %s"
% (vir))
return(-1)
return(params[0] * ((M/(2e12/0.72))**params[1]) * ((1+z)**params[2])) | python | def cduffy(z, M, vir='200crit', relaxed=True):
""" NFW conc from Duffy 08 Table 1 for halo mass and redshift"""
if(vir == '200crit'):
if relaxed:
params = [6.71, -0.091, -0.44]
else:
params = [5.71, -0.084, -0.47]
elif(vir == 'tophat'):
if relaxed:
params = [9.23, -0.090, -0.69]
else:
params = [7.85, -0.081, -0.71]
elif(vir == '200mean'):
if relaxed:
params = [11.93, -0.090, -0.99]
else:
params = [10.14, -0.081, -1.01]
else:
print("Didn't recognise the halo boundary definition provided %s"
% (vir))
return(-1)
return(params[0] * ((M/(2e12/0.72))**params[1]) * ((1+z)**params[2])) | [
"def",
"cduffy",
"(",
"z",
",",
"M",
",",
"vir",
"=",
"'200crit'",
",",
"relaxed",
"=",
"True",
")",
":",
"if",
"(",
"vir",
"==",
"'200crit'",
")",
":",
"if",
"relaxed",
":",
"params",
"=",
"[",
"6.71",
",",
"-",
"0.091",
",",
"-",
"0.44",
"]",
"else",
":",
"params",
"=",
"[",
"5.71",
",",
"-",
"0.084",
",",
"-",
"0.47",
"]",
"elif",
"(",
"vir",
"==",
"'tophat'",
")",
":",
"if",
"relaxed",
":",
"params",
"=",
"[",
"9.23",
",",
"-",
"0.090",
",",
"-",
"0.69",
"]",
"else",
":",
"params",
"=",
"[",
"7.85",
",",
"-",
"0.081",
",",
"-",
"0.71",
"]",
"elif",
"(",
"vir",
"==",
"'200mean'",
")",
":",
"if",
"relaxed",
":",
"params",
"=",
"[",
"11.93",
",",
"-",
"0.090",
",",
"-",
"0.99",
"]",
"else",
":",
"params",
"=",
"[",
"10.14",
",",
"-",
"0.081",
",",
"-",
"1.01",
"]",
"else",
":",
"print",
"(",
"\"Didn't recognise the halo boundary definition provided %s\"",
"%",
"(",
"vir",
")",
")",
"return",
"(",
"params",
"[",
"0",
"]",
"*",
"(",
"(",
"M",
"/",
"(",
"2e12",
"/",
"0.72",
")",
")",
"**",
"params",
"[",
"1",
"]",
")",
"*",
"(",
"(",
"1",
"+",
"z",
")",
"**",
"params",
"[",
"2",
"]",
")",
")"
] | NFW conc from Duffy 08 Table 1 for halo mass and redshift | [
"NFW",
"conc",
"from",
"Duffy",
"08",
"Table",
"1",
"for",
"halo",
"mass",
"and",
"redshift"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L122-L144 |
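A hand evaluation of the fit for the relaxed '200crit' parameters (pure Python, no dependencies):

M, z = 1e12, 0.0
A, B, C = 6.71, -0.091, -0.44  # relaxed 200crit row above
c = A * (M / (2e12 / 0.72))**B * (1 + z)**C
print(round(c, 2))  # ~7.36 for a 1e12 Msol halo at z=0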
astroduff/commah | commah/commah.py | _delta_sigma | def _delta_sigma(**cosmo):
""" Perturb best-fit constant of proportionality Ascaling for
rho_crit - rho_2 relation for unknown cosmology (Correa et al 2015c)
Parameters
----------
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
float
The perturbed 'A' relation between rho_2 and rho_crit for the cosmology
Raises
------
"""
M8_cosmo = cp.perturbation.radius_to_mass(8, **cosmo)
perturbed_A = (0.796/cosmo['sigma_8']) * \
(M8_cosmo/2.5e14)**((cosmo['n']-0.963)/6)
return(perturbed_A) | python | def _delta_sigma(**cosmo):
""" Perturb best-fit constant of proportionality Ascaling for
rho_crit - rho_2 relation for unknown cosmology (Correa et al 2015c)
Parameters
----------
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
float
The perturbed 'A' relation between rho_2 and rho_crit for the cosmology
Raises
------
"""
M8_cosmo = cp.perturbation.radius_to_mass(8, **cosmo)
perturbed_A = (0.796/cosmo['sigma_8']) * \
(M8_cosmo/2.5e14)**((cosmo['n']-0.963)/6)
return(perturbed_A) | [
"def",
"_delta_sigma",
"(",
"*",
"*",
"cosmo",
")",
":",
"M8_cosmo",
"=",
"cp",
".",
"perturbation",
".",
"radius_to_mass",
"(",
"8",
",",
"*",
"*",
"cosmo",
")",
"perturbed_A",
"=",
"(",
"0.796",
"/",
"cosmo",
"[",
"'sigma_8'",
"]",
")",
"*",
"(",
"M8_cosmo",
"/",
"2.5e14",
")",
"**",
"(",
"(",
"cosmo",
"[",
"'n'",
"]",
"-",
"0.963",
")",
"/",
"6",
")",
"return",
"(",
"perturbed_A",
")"
] | Perturb best-fit constant of proportionality Ascaling for
rho_crit - rho_2 relation for unknown cosmology (Correa et al 2015c)
Parameters
----------
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
float
The perturbed 'A' relation between rho_2 and rho_crit for the cosmology
Raises
------ | [
"Perturb",
"best",
"-",
"fit",
"constant",
"of",
"proportionality",
"Ascaling",
"for",
"rho_crit",
"-",
"rho_2",
"relation",
"for",
"unknown",
"cosmology",
"(",
"Correa",
"et",
"al",
"2015c",
")"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L147-L172 |
astroduff/commah | commah/commah.py | getAscaling | def getAscaling(cosmology, newcosmo=None):
""" Returns the normalisation constant between
Rho_-2 and Rho_mean(z_formation) for a given cosmology
Parameters
----------
cosmology : str or dict
Can be named cosmology, default WMAP7 (aka DRAGONS), or
DRAGONS, WMAP1, WMAP3, WMAP5, WMAP7, WMAP9, Planck13, Planck15
or dictionary similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
newcosmo : str, optional
If cosmology is not from the predefined list, the A_scaling
variable has to be perturbed. Defaults to None.
Returns
-------
float
The scaled 'A' relation between rho_2 and rho_crit for the cosmology
"""
# Values from Correa 15c
defaultcosmologies = {'dragons': 887, 'wmap1': 853, 'wmap3': 850,
'wmap5': 887, 'wmap7': 887, 'wmap9': 950,
'wmap1_lss': 853, 'wmap3_mean': 850,
'wmap5_ml': 887, 'wmap5_lss': 887,
'wmap7_lss': 887,
'planck13': 880, 'planck15': 880}
if newcosmo:
# Scale from default WMAP5 cosmology using Correa et al 14b eqn C1
A_scaling = defaultcosmologies['wmap5'] * _delta_sigma(**cosmology)
else:
if cosmology.lower() in defaultcosmologies.keys():
A_scaling = defaultcosmologies[cosmology.lower()]
else:
print("Error, don't recognise your cosmology for A_scaling ")
print("You provided %s" % (cosmology))
return(A_scaling) | python | def getAscaling(cosmology, newcosmo=None):
""" Returns the normalisation constant between
Rho_-2 and Rho_mean(z_formation) for a given cosmology
Parameters
----------
cosmology : str or dict
Can be named cosmology, default WMAP7 (aka DRAGONS), or
DRAGONS, WMAP1, WMAP3, WMAP5, WMAP7, WMAP9, Planck13, Planck15
or dictionary similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
newcosmo : str, optional
If cosmology is not from the predefined list, the A_scaling
variable has to be perturbed. Defaults to None.
Returns
-------
float
The scaled 'A' relation between rho_2 and rho_crit for the cosmology
"""
# Values from Correa 15c
defaultcosmologies = {'dragons': 887, 'wmap1': 853, 'wmap3': 850,
'wmap5': 887, 'wmap7': 887, 'wmap9': 950,
'wmap1_lss': 853, 'wmap3_mean': 850,
'wmap5_ml': 887, 'wmap5_lss': 887,
'wmap7_lss': 887,
'planck13': 880, 'planck15': 880}
if newcosmo:
# Scale from default WMAP5 cosmology using Correa et al 14b eqn C1
A_scaling = defaultcosmologies['wmap5'] * _delta_sigma(**cosmology)
else:
if cosmology.lower() in defaultcosmologies.keys():
A_scaling = defaultcosmologies[cosmology.lower()]
else:
print("Error, don't recognise your cosmology for A_scaling ")
print("You provided %s" % (cosmology))
return(A_scaling) | [
"def",
"getAscaling",
"(",
"cosmology",
",",
"newcosmo",
"=",
"None",
")",
":",
"# Values from Correa 15c",
"defaultcosmologies",
"=",
"{",
"'dragons'",
":",
"887",
",",
"'wmap1'",
":",
"853",
",",
"'wmap3'",
":",
"850",
",",
"'wmap5'",
":",
"887",
",",
"'wmap7'",
":",
"887",
",",
"'wmap9'",
":",
"950",
",",
"'wmap1_lss'",
":",
"853",
",",
"'wmap3_mean'",
":",
"850",
",",
"'wmap5_ml'",
":",
"887",
",",
"'wmap5_lss'",
":",
"887",
",",
"'wmap7_lss'",
":",
"887",
",",
"'planck13'",
":",
"880",
",",
"'planck15'",
":",
"880",
"}",
"if",
"newcosmo",
":",
"# Scale from default WMAP5 cosmology using Correa et al 14b eqn C1",
"A_scaling",
"=",
"defaultcosmologies",
"[",
"'wmap5'",
"]",
"*",
"_delta_sigma",
"(",
"*",
"*",
"cosmology",
")",
"else",
":",
"if",
"cosmology",
".",
"lower",
"(",
")",
"in",
"defaultcosmologies",
".",
"keys",
"(",
")",
":",
"A_scaling",
"=",
"defaultcosmologies",
"[",
"cosmology",
".",
"lower",
"(",
")",
"]",
"else",
":",
"print",
"(",
"\"Error, don't recognise your cosmology for A_scaling \"",
")",
"print",
"(",
"\"You provided %s\"",
"%",
"(",
"cosmology",
")",
")",
"return",
"(",
"A_scaling",
")"
] | Returns the normalisation constant between
Rho_-2 and Rho_mean(z_formation) for a given cosmology
Parameters
----------
cosmology : str or dict
Can be named cosmology, default WMAP7 (aka DRAGONS), or
DRAGONS, WMAP1, WMAP3, WMAP5, WMAP7, WMAP9, Planck13, Planck15
or dictionary similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
newcosmo : str, optional
If cosmology is not from the predefined list, the A_scaling
variable has to be perturbed. Defaults to None.
Returns
-------
float
The scaled 'A' relation between rho_2 and rho_crit for the cosmology | [
"Returns",
"the",
"normalisation",
"constant",
"between",
"Rho_",
"-",
"2",
"and",
"Rho_mean",
"(",
"z_formation",
")",
"for",
"a",
"given",
"cosmology"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L175-L216 |
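A sketch of the rescaling applied for a non-default cosmology, combining _delta_sigma with the WMAP5 anchor; the M8 value is an assumption standing in for cosmolopy's radius_to_mass(8, **cosmo):

sigma_8, n = 0.82, 0.96  # illustrative cosmology
M8 = 2.4e14              # assumed mass within an 8 Mpc/h sphere [Msol]
A_scaling = 887 * (0.796 / sigma_8) * (M8 / 2.5e14)**((n - 0.963) / 6)
print(round(A_scaling))  # ~861, perturbed from the WMAP5 value of 887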
astroduff/commah | commah/commah.py | _int_growth | def _int_growth(z, **cosmo):
""" Returns integral of the linear growth factor from z=200 to z=z """
zmax = 200
if hasattr(z, "__len__"):
for zval in z:
assert(zval < zmax)
else:
assert(z < zmax)
y, yerr = scipy.integrate.quad(
lambda z: (1 + z)/(cosmo['omega_M_0']*(1 + z)**3 +
cosmo['omega_lambda_0'])**(1.5),
z, zmax)
return(y) | python | def _int_growth(z, **cosmo):
""" Returns integral of the linear growth factor from z=200 to z=z """
zmax = 200
if hasattr(z, "__len__"):
for zval in z:
assert(zval < zmax)
else:
assert(z < zmax)
y, yerr = scipy.integrate.quad(
lambda z: (1 + z)/(cosmo['omega_M_0']*(1 + z)**3 +
cosmo['omega_lambda_0'])**(1.5),
z, zmax)
return(y) | [
"def",
"_int_growth",
"(",
"z",
",",
"*",
"*",
"cosmo",
")",
":",
"zmax",
"=",
"200",
"if",
"hasattr",
"(",
"z",
",",
"\"__len__\"",
")",
":",
"for",
"zval",
"in",
"z",
":",
"assert",
"(",
"zval",
"<",
"zmax",
")",
"else",
":",
"assert",
"(",
"z",
"<",
"zmax",
")",
"y",
",",
"yerr",
"=",
"scipy",
".",
"integrate",
".",
"quad",
"(",
"lambda",
"z",
":",
"(",
"1",
"+",
"z",
")",
"/",
"(",
"cosmo",
"[",
"'omega_M_0'",
"]",
"*",
"(",
"1",
"+",
"z",
")",
"**",
"3",
"+",
"cosmo",
"[",
"'omega_lambda_0'",
"]",
")",
"**",
"(",
"1.5",
")",
",",
"z",
",",
"zmax",
")",
"return",
"(",
"y",
")"
] | Returns integral of the linear growth factor from z=200 to z=z | [
"Returns",
"integral",
"of",
"the",
"linear",
"growth",
"factor",
"from",
"z",
"=",
"200",
"to",
"z",
"=",
"z"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L219-L235 |
astroduff/commah | commah/commah.py | _deriv_growth | def _deriv_growth(z, **cosmo):
""" Returns derivative of the linear growth factor at z
for a given cosmology **cosmo """
inv_h = (cosmo['omega_M_0']*(1 + z)**3 + cosmo['omega_lambda_0'])**(-0.5)
fz = (1 + z) * inv_h**3
deriv_g = growthfactor(z, norm=True, **cosmo)*(inv_h**2) *\
1.5 * cosmo['omega_M_0'] * (1 + z)**2 -\
fz * growthfactor(z, norm=True, **cosmo)/_int_growth(z, **cosmo)
return(deriv_g) | python | def _deriv_growth(z, **cosmo):
""" Returns derivative of the linear growth factor at z
for a given cosmology **cosmo """
inv_h = (cosmo['omega_M_0']*(1 + z)**3 + cosmo['omega_lambda_0'])**(-0.5)
fz = (1 + z) * inv_h**3
deriv_g = growthfactor(z, norm=True, **cosmo)*(inv_h**2) *\
1.5 * cosmo['omega_M_0'] * (1 + z)**2 -\
fz * growthfactor(z, norm=True, **cosmo)/_int_growth(z, **cosmo)
return(deriv_g) | [
"def",
"_deriv_growth",
"(",
"z",
",",
"*",
"*",
"cosmo",
")",
":",
"inv_h",
"=",
"(",
"cosmo",
"[",
"'omega_M_0'",
"]",
"*",
"(",
"1",
"+",
"z",
")",
"**",
"3",
"+",
"cosmo",
"[",
"'omega_lambda_0'",
"]",
")",
"**",
"(",
"-",
"0.5",
")",
"fz",
"=",
"(",
"1",
"+",
"z",
")",
"*",
"inv_h",
"**",
"3",
"deriv_g",
"=",
"growthfactor",
"(",
"z",
",",
"norm",
"=",
"True",
",",
"*",
"*",
"cosmo",
")",
"*",
"(",
"inv_h",
"**",
"2",
")",
"*",
"1.5",
"*",
"cosmo",
"[",
"'omega_M_0'",
"]",
"*",
"(",
"1",
"+",
"z",
")",
"**",
"2",
"-",
"fz",
"*",
"growthfactor",
"(",
"z",
",",
"norm",
"=",
"True",
",",
"*",
"*",
"cosmo",
")",
"/",
"_int_growth",
"(",
"z",
",",
"*",
"*",
"cosmo",
")",
"return",
"(",
"deriv_g",
")"
] | Returns derivative of the linear growth factor at z
for a given cosmology **cosmo | [
"Returns",
"derivative",
"of",
"the",
"linear",
"growth",
"factor",
"at",
"z",
"for",
"a",
"given",
"cosmology",
"**",
"cosmo"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L238-L249 |
astroduff/commah | commah/commah.py | growthfactor | def growthfactor(z, norm=True, **cosmo):
""" Returns linear growth factor at a given redshift, normalised to z=0
by default, for a given cosmology
Parameters
----------
z : float or numpy array
The redshift at which the growth factor should be calculated
norm : boolean, optional
If true then normalise the growth factor to the z=0 case. Defaults to True.
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
float or numpy array
The growth factor at a range of redshifts 'z'
Raises
------
"""
H = np.sqrt(cosmo['omega_M_0'] * (1 + z)**3 +
cosmo['omega_lambda_0'])
growthval = H * _int_growth(z, **cosmo)
if norm:
growthval /= _int_growth(0, **cosmo)
return(growthval) | python | def growthfactor(z, norm=True, **cosmo):
""" Returns linear growth factor at a given redshift, normalised to z=0
by default, for a given cosmology
Parameters
----------
z : float or numpy array
The redshift at which the growth factor should be calculated
norm : boolean, optional
If true then normalise the growth factor to the z=0 case. Defaults to True.
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
float or numpy array
The growth factor at a range of redshifts 'z'
Raises
------
"""
H = np.sqrt(cosmo['omega_M_0'] * (1 + z)**3 +
cosmo['omega_lambda_0'])
growthval = H * _int_growth(z, **cosmo)
if norm:
growthval /= _int_growth(0, **cosmo)
return(growthval) | [
"def",
"growthfactor",
"(",
"z",
",",
"norm",
"=",
"True",
",",
"*",
"*",
"cosmo",
")",
":",
"H",
"=",
"np",
".",
"sqrt",
"(",
"cosmo",
"[",
"'omega_M_0'",
"]",
"*",
"(",
"1",
"+",
"z",
")",
"**",
"3",
"+",
"cosmo",
"[",
"'omega_lambda_0'",
"]",
")",
"growthval",
"=",
"H",
"*",
"_int_growth",
"(",
"z",
",",
"*",
"*",
"cosmo",
")",
"if",
"norm",
":",
"growthval",
"/=",
"_int_growth",
"(",
"0",
",",
"*",
"*",
"cosmo",
")",
"return",
"(",
"growthval",
")"
] | Returns linear growth factor at a given redshift, normalised to z=0
by default, for a given cosmology
Parameters
----------
z : float or numpy array
The redshift at which the growth factor should be calculated
norm : boolean, optional
If true then normalise the growth factor to the z=0 case. Defaults to True.
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
float or numpy array
The growth factor at a range of redshifts 'z'
Raises
------ | [
"Returns",
"linear",
"growth",
"factor",
"at",
"a",
"given",
"redshift",
"normalised",
"to",
"z",
"=",
"0",
"by",
"default",
"for",
"a",
"given",
"cosmology"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L252-L284 |
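A self-contained version of the same calculation for an assumed flat cosmology (Om=0.25, Ol=0.75), mirroring _int_growth and growthfactor:

import numpy as np
from scipy.integrate import quad

om, ol = 0.25, 0.75

def D(z, zmax=200):
    # H(z)/H0 times the growth integral, as in growthfactor above
    integrand = lambda zp: (1 + zp) / (om * (1 + zp)**3 + ol)**1.5
    return np.sqrt(om * (1 + z)**3 + ol) * quad(integrand, z, zmax)[0]

print(D(1.0) / D(0.0))  # ~0.6: linear growth at z=1 relative to today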
astroduff/commah | commah/commah.py | _minimize_c | def _minimize_c(c, z=0, a_tilde=1, b_tilde=-1,
Ascaling=900, omega_M_0=0.25, omega_lambda_0=0.75):
""" Trial function to solve 2 eqns (17 and 18) from Correa et al. (2015c)
for 1 unknown, i.e. concentration, returned by a minimisation call """
# Fn 1 (LHS of Eqn 18)
Y1 = np.log(2) - 0.5
Yc = np.log(1+c) - c/(1+c)
f1 = Y1/Yc
# Fn 2 (RHS of Eqn 18)
# Eqn 14 - Define the mean inner density
rho_2 = 200 * c**3 * Y1 / Yc
# Eqn 17 rearranged to solve for Formation Redshift
# essentially when universe had rho_2 density
zf = (((1 + z)**3 + omega_lambda_0/omega_M_0) *
(rho_2/Ascaling) - omega_lambda_0/omega_M_0)**(1/3) - 1
# RHS of Eqn 19
f2 = ((1 + zf - z)**a_tilde) * np.exp((zf - z) * b_tilde)
# LHS - RHS should be zero for the correct concentration
return(f1-f2) | python | def _minimize_c(c, z=0, a_tilde=1, b_tilde=-1,
Ascaling=900, omega_M_0=0.25, omega_lambda_0=0.75):
""" Trial function to solve 2 eqns (17 and 18) from Correa et al. (2015c)
for 1 unknown, i.e. concentration, returned by a minimisation call """
# Fn 1 (LHS of Eqn 18)
Y1 = np.log(2) - 0.5
Yc = np.log(1+c) - c/(1+c)
f1 = Y1/Yc
# Fn 2 (RHS of Eqn 18)
# Eqn 14 - Define the mean inner density
rho_2 = 200 * c**3 * Y1 / Yc
# Eqn 17 rearranged to solve for Formation Redshift
# essentially when universe had rho_2 density
zf = (((1 + z)**3 + omega_lambda_0/omega_M_0) *
(rho_2/Ascaling) - omega_lambda_0/omega_M_0)**(1/3) - 1
# RHS of Eqn 19
f2 = ((1 + zf - z)**a_tilde) * np.exp((zf - z) * b_tilde)
# LHS - RHS should be zero for the correct concentration
return(f1-f2) | [
"def",
"_minimize_c",
"(",
"c",
",",
"z",
"=",
"0",
",",
"a_tilde",
"=",
"1",
",",
"b_tilde",
"=",
"-",
"1",
",",
"Ascaling",
"=",
"900",
",",
"omega_M_0",
"=",
"0.25",
",",
"omega_lambda_0",
"=",
"0.75",
")",
":",
"# Fn 1 (LHS of Eqn 18)",
"Y1",
"=",
"np",
".",
"log",
"(",
"2",
")",
"-",
"0.5",
"Yc",
"=",
"np",
".",
"log",
"(",
"1",
"+",
"c",
")",
"-",
"c",
"/",
"(",
"1",
"+",
"c",
")",
"f1",
"=",
"Y1",
"/",
"Yc",
"# Fn 2 (RHS of Eqn 18)",
"# Eqn 14 - Define the mean inner density",
"rho_2",
"=",
"200",
"*",
"c",
"**",
"3",
"*",
"Y1",
"/",
"Yc",
"# Eqn 17 rearranged to solve for Formation Redshift",
"# essentially when universe had rho_2 density",
"zf",
"=",
"(",
"(",
"(",
"1",
"+",
"z",
")",
"**",
"3",
"+",
"omega_lambda_0",
"/",
"omega_M_0",
")",
"*",
"(",
"rho_2",
"/",
"Ascaling",
")",
"-",
"omega_lambda_0",
"/",
"omega_M_0",
")",
"**",
"(",
"1",
"/",
"3",
")",
"-",
"1",
"# RHS of Eqn 19",
"f2",
"=",
"(",
"(",
"1",
"+",
"zf",
"-",
"z",
")",
"**",
"a_tilde",
")",
"*",
"np",
".",
"exp",
"(",
"(",
"zf",
"-",
"z",
")",
"*",
"b_tilde",
")",
"# LHS - RHS should be zero for the correct concentration",
"return",
"(",
"f1",
"-",
"f2",
")"
] | Trial function to solve 2 eqns (17 and 18) from Correa et al. (2015c)
for 1 unknown, i.e. concentration, returned by a minimisation call | [
"Trial",
"function",
"to",
"solve",
"2",
"eqns",
"(",
"17",
"and",
"18",
")",
"from",
"Correa",
"et",
"al",
".",
"(",
"2015c",
")",
"for",
"1",
"unknown",
"i",
".",
"e",
".",
"concentration",
"returned",
"by",
"a",
"minimisation",
"call"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L287-L312 |
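How this trial function is driven (COM below does the same with brentq): a root-finding sketch where the growth indices are illustrative stand-ins for what calc_ab returns for a ~1e12 Msol halo at z=0:

import numpy as np
from scipy.optimize import brentq

def f(c, z=0.0, at=0.25, bt=-0.75, A=887.0, om=0.25, ol=0.75):
    Y1 = np.log(2) - 0.5
    Yc = np.log(1 + c) - c / (1 + c)
    rho_2 = 200 * c**3 * Y1 / Yc
    zf = (((1 + z)**3 + ol/om) * (rho_2/A) - ol/om)**(1/3) - 1
    return Y1/Yc - (1 + zf - z)**at * np.exp((zf - z) * bt)

print(brentq(f, 2, 1000))  # ~8, a plausible z=0 concentration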
astroduff/commah | commah/commah.py | formationz | def formationz(c, z, Ascaling=900, omega_M_0=0.25, omega_lambda_0=0.75):
""" Rearrange eqn 18 from Correa et al (2015c) to return
formation redshift for a concentration at a given redshift
Parameters
----------
c : float / numpy array
Concentration of halo
z : float / numpy array
Redshift of halo with concentration c
Ascaling : float
Cosmological dependent scaling between densities, use function
getAscaling('WMAP5') if unsure. Default is 900.
omega_M_0 : float
Mass density of the universe. Default is 0.25
omega_lambda_0 : float
Dark Energy density of the universe. Default is 0.75
Returns
-------
zf : float / numpy array
Formation redshift for halo of concentration 'c' at redshift 'z'
"""
Y1 = np.log(2) - 0.5
Yc = np.log(1+c) - c/(1+c)
rho_2 = 200*(c**3)*Y1/Yc
zf = (((1+z)**3 + omega_lambda_0/omega_M_0) *
(rho_2/Ascaling) - omega_lambda_0/omega_M_0)**(1/3) - 1
return(zf) | python | def formationz(c, z, Ascaling=900, omega_M_0=0.25, omega_lambda_0=0.75):
""" Rearrange eqn 18 from Correa et al (2015c) to return
formation redshift for a concentration at a given redshift
Parameters
----------
c : float / numpy array
Concentration of halo
z : float / numpy array
Redshift of halo with concentration c
Ascaling : float
Cosmological dependent scaling between densities, use function
getAscaling('WMAP5') if unsure. Default is 900.
omega_M_0 : float
Mass density of the universe. Default is 0.25
omega_lambda_0 : float
Dark Energy density of the universe. Default is 0.75
Returns
-------
zf : float / numpy array
Formation redshift for halo of concentration 'c' at redshift 'z'
"""
Y1 = np.log(2) - 0.5
Yc = np.log(1+c) - c/(1+c)
rho_2 = 200*(c**3)*Y1/Yc
zf = (((1+z)**3 + omega_lambda_0/omega_M_0) *
(rho_2/Ascaling) - omega_lambda_0/omega_M_0)**(1/3) - 1
return(zf) | [
"def",
"formationz",
"(",
"c",
",",
"z",
",",
"Ascaling",
"=",
"900",
",",
"omega_M_0",
"=",
"0.25",
",",
"omega_lambda_0",
"=",
"0.75",
")",
":",
"Y1",
"=",
"np",
".",
"log",
"(",
"2",
")",
"-",
"0.5",
"Yc",
"=",
"np",
".",
"log",
"(",
"1",
"+",
"c",
")",
"-",
"c",
"/",
"(",
"1",
"+",
"c",
")",
"rho_2",
"=",
"200",
"*",
"(",
"c",
"**",
"3",
")",
"*",
"Y1",
"/",
"Yc",
"zf",
"=",
"(",
"(",
"(",
"1",
"+",
"z",
")",
"**",
"3",
"+",
"omega_lambda_0",
"/",
"omega_M_0",
")",
"*",
"(",
"rho_2",
"/",
"Ascaling",
")",
"-",
"omega_lambda_0",
"/",
"omega_M_0",
")",
"**",
"(",
"1",
"/",
"3",
")",
"-",
"1",
"return",
"(",
"zf",
")"
] | Rearrange eqn 18 from Correa et al (2015c) to return
formation redshift for a concentration at a given redshift
Parameters
----------
c : float / numpy array
Concentration of halo
z : float / numpy array
Redshift of halo with concentration c
Ascaling : float
Cosmological dependent scaling between densities, use function
getAscaling('WMAP5') if unsure. Default is 900.
omega_M_0 : float
Mass density of the universe. Default is 0.25
omega_lambda_0 : float
Dark Energy density of the universe. Default is 0.75
Returns
-------
zf : float / numpy array
Formation redshift for halo of concentration 'c' at redshift 'z' | [
"Rearrange",
"eqn",
"18",
"from",
"Correa",
"et",
"al",
"(",
"2015c",
")",
"to",
"return",
"formation",
"redshift",
"for",
"a",
"concentration",
"at",
"a",
"given",
"redshift"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L315-L346 |
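Direct evaluation of the closed form with assumed WMAP5-like inputs:

import numpy as np
c, z, A, om, ol = 8.0, 0.0, 887.0, 0.25, 0.75
Y1 = np.log(2) - 0.5
Yc = np.log(1 + c) - c / (1 + c)
rho_2 = 200 * c**3 * Y1 / Yc
zf = (((1 + z)**3 + ol/om) * (rho_2/A) - ol/om)**(1/3) - 1
print(round(zf, 1))  # ~3.0: a c=8 halo today assembled its core by z~3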
astroduff/commah | commah/commah.py | calc_ab | def calc_ab(zi, Mi, **cosmo):
""" Calculate growth rate indices a_tilde and b_tilde
Parameters
----------
zi : float
Redshift
Mi : float
Halo mass at redshift 'zi'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(a_tilde, b_tilde) : float
"""
# When zi = 0, the a_tilde becomes alpha and b_tilde becomes beta
# Eqn 23 of Correa et al 2015a (analytically solve from Eqn 16 and 17)
# Arbitrary formation redshift, z_-2 in COM is more physically motivated
zf = -0.0064 * (np.log10(Mi))**2 + 0.0237 * (np.log10(Mi)) + 1.8837
# Eqn 22 of Correa et al 2015a
q = 4.137 * zf**(-0.9476)
# Radius of a mass Mi
R_Mass = cp.perturbation.mass_to_radius(Mi, **cosmo) # [Mpc]
# Radius of a mass Mi/q
Rq_Mass = cp.perturbation.mass_to_radius(Mi/q, **cosmo) # [Mpc]
# Mass variance 'sigma' evaluated at z=0 to a good approximation
sig, err_sig = cp.perturbation.sigma_r(R_Mass, 0, **cosmo) # [Mpc]
sigq, err_sigq = cp.perturbation.sigma_r(Rq_Mass, 0, **cosmo) # [Mpc]
f = (sigq**2 - sig**2)**(-0.5)
# Eqn 9 and 10 from Correa et al 2015c
# (generalised to zi from Correa et al 2015a's z=0 special case)
# a_tilde is power law growth rate
a_tilde = (np.sqrt(2/np.pi) * 1.686 * _deriv_growth(zi, **cosmo) /
growthfactor(zi, norm=True, **cosmo)**2 + 1)*f
# b_tilde is exponential growth rate
b_tilde = -f
return(a_tilde, b_tilde) | python | def calc_ab(zi, Mi, **cosmo):
""" Calculate growth rate indices a_tilde and b_tilde
Parameters
----------
zi : float
Redshift
Mi : float
Halo mass at redshift 'zi'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(a_tilde, b_tilde) : float
"""
# When zi = 0, the a_tilde becomes alpha and b_tilde becomes beta
# Eqn 23 of Correa et al 2015a (analytically solve from Eqn 16 and 17)
# Arbitrary formation redshift, z_-2 in COM is more physically motivated
zf = -0.0064 * (np.log10(Mi))**2 + 0.0237 * (np.log10(Mi)) + 1.8837
# Eqn 22 of Correa et al 2015a
q = 4.137 * zf**(-0.9476)
# Radius of a mass Mi
R_Mass = cp.perturbation.mass_to_radius(Mi, **cosmo) # [Mpc]
# Radius of a mass Mi/q
Rq_Mass = cp.perturbation.mass_to_radius(Mi/q, **cosmo) # [Mpc]
# Mass variance 'sigma' evaluated at z=0 to a good approximation
sig, err_sig = cp.perturbation.sigma_r(R_Mass, 0, **cosmo) # [Mpc]
sigq, err_sigq = cp.perturbation.sigma_r(Rq_Mass, 0, **cosmo) # [Mpc]
f = (sigq**2 - sig**2)**(-0.5)
# Eqn 9 and 10 from Correa et al 2015c
# (generalised to zi from Correa et al 2015a's z=0 special case)
# a_tilde is power law growth rate
a_tilde = (np.sqrt(2/np.pi) * 1.686 * _deriv_growth(zi, **cosmo) /
growthfactor(zi, norm=True, **cosmo)**2 + 1)*f
# b_tilde is exponential growth rate
b_tilde = -f
return(a_tilde, b_tilde) | [
"def",
"calc_ab",
"(",
"zi",
",",
"Mi",
",",
"*",
"*",
"cosmo",
")",
":",
"# When zi = 0, the a_tilde becomes alpha and b_tilde becomes beta",
"# Eqn 23 of Correa et al 2015a (analytically solve from Eqn 16 and 17)",
"# Arbitray formation redshift, z_-2 in COM is more physically motivated",
"zf",
"=",
"-",
"0.0064",
"*",
"(",
"np",
".",
"log10",
"(",
"Mi",
")",
")",
"**",
"2",
"+",
"0.0237",
"*",
"(",
"np",
".",
"log10",
"(",
"Mi",
")",
")",
"+",
"1.8837",
"# Eqn 22 of Correa et al 2015a",
"q",
"=",
"4.137",
"*",
"zf",
"**",
"(",
"-",
"0.9476",
")",
"# Radius of a mass Mi",
"R_Mass",
"=",
"cp",
".",
"perturbation",
".",
"mass_to_radius",
"(",
"Mi",
",",
"*",
"*",
"cosmo",
")",
"# [Mpc]",
"# Radius of a mass Mi/q",
"Rq_Mass",
"=",
"cp",
".",
"perturbation",
".",
"mass_to_radius",
"(",
"Mi",
"/",
"q",
",",
"*",
"*",
"cosmo",
")",
"# [Mpc]",
"# Mass variance 'sigma' evaluate at z=0 to a good approximation",
"sig",
",",
"err_sig",
"=",
"cp",
".",
"perturbation",
".",
"sigma_r",
"(",
"R_Mass",
",",
"0",
",",
"*",
"*",
"cosmo",
")",
"# [Mpc]",
"sigq",
",",
"err_sigq",
"=",
"cp",
".",
"perturbation",
".",
"sigma_r",
"(",
"Rq_Mass",
",",
"0",
",",
"*",
"*",
"cosmo",
")",
"# [Mpc]",
"f",
"=",
"(",
"sigq",
"**",
"2",
"-",
"sig",
"**",
"2",
")",
"**",
"(",
"-",
"0.5",
")",
"# Eqn 9 and 10 from Correa et al 2015c",
"# (generalised to zi from Correa et al 2015a's z=0 special case)",
"# a_tilde is power law growth rate",
"a_tilde",
"=",
"(",
"np",
".",
"sqrt",
"(",
"2",
"/",
"np",
".",
"pi",
")",
"*",
"1.686",
"*",
"_deriv_growth",
"(",
"zi",
",",
"*",
"*",
"cosmo",
")",
"/",
"growthfactor",
"(",
"zi",
",",
"norm",
"=",
"True",
",",
"*",
"*",
"cosmo",
")",
"**",
"2",
"+",
"1",
")",
"*",
"f",
"# b_tilde is exponential growth rate",
"b_tilde",
"=",
"-",
"f",
"return",
"(",
"a_tilde",
",",
"b_tilde",
")"
] | Calculate growth rate indices a_tilde and b_tilde
Parameters
----------
zi : float
Redshift
Mi : float
Halo mass at redshift 'zi'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(a_tilde, b_tilde) : float | [
"Calculate",
"growth",
"rate",
"indices",
"a_tilde",
"and",
"b_tilde"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L349-L397 |
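The first two steps of calc_ab are cheap to verify by hand (pure NumPy; the sigma and growth-factor pieces require cosmolopy):

import numpy as np
M = 1e12
zf = -0.0064 * np.log10(M)**2 + 0.0237 * np.log10(M) + 1.8837
q = 4.137 * zf**(-0.9476)
print(round(zf, 2), round(q, 2))  # ~1.25 and ~3.36: sigma of the halo is
# compared against that of a progenitor q times less massive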
astroduff/commah | commah/commah.py | acc_rate | def acc_rate(z, zi, Mi, **cosmo):
""" Calculate accretion rate and mass history of a halo at any
redshift 'z' with mass 'Mi' at a lower redshift 'zi'
Parameters
----------
z : float
Redshift to solve acc_rate / mass history. Note zi<z
zi : float
Redshift
Mi : float
Halo mass at redshift 'zi'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(dMdt, Mz) : float
Accretion rate [Msol/yr], halo mass [Msol] at redshift 'z'
"""
# Find parameters a_tilde and b_tilde for initial redshift
# use Eqn 9 and 10 of Correa et al. (2015c)
a_tilde, b_tilde = calc_ab(zi, Mi, **cosmo)
# Halo mass at z, in Msol
# use Eqn 8 in Correa et al. (2015c)
Mz = Mi * ((1 + z - zi)**a_tilde) * (np.exp(b_tilde * (z - zi)))
# Accretion rate at z, Msol yr^-1
# use Eqn 11 from Correa et al. (2015c)
dMdt = 71.6 * (Mz/1e12) * (cosmo['h']/0.7) *\
(-a_tilde / (1 + z - zi) - b_tilde) * (1 + z) *\
np.sqrt(cosmo['omega_M_0']*(1 + z)**3+cosmo['omega_lambda_0'])
return(dMdt, Mz) | python | def acc_rate(z, zi, Mi, **cosmo):
""" Calculate accretion rate and mass history of a halo at any
redshift 'z' with mass 'Mi' at a lower redshift 'zi'
Parameters
----------
z : float
Redshift to solve acc_rate / mass history. Note zi<z
zi : float
Redshift
Mi : float
Halo mass at redshift 'zi'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(dMdt, Mz) : float
Accretion rate [Msol/yr], halo mass [Msol] at redshift 'z'
"""
# Find parameters a_tilde and b_tilde for initial redshift
# use Eqn 9 and 10 of Correa et al. (2015c)
a_tilde, b_tilde = calc_ab(zi, Mi, **cosmo)
# Halo mass at z, in Msol
# use Eqn 8 in Correa et al. (2015c)
Mz = Mi * ((1 + z - zi)**a_tilde) * (np.exp(b_tilde * (z - zi)))
# Accretion rate at z, Msol yr^-1
# use Eqn 11 from Correa et al. (2015c)
dMdt = 71.6 * (Mz/1e12) * (cosmo['h']/0.7) *\
(-a_tilde / (1 + z - zi) - b_tilde) * (1 + z) *\
np.sqrt(cosmo['omega_M_0']*(1 + z)**3+cosmo['omega_lambda_0'])
return(dMdt, Mz) | [
"def",
"acc_rate",
"(",
"z",
",",
"zi",
",",
"Mi",
",",
"*",
"*",
"cosmo",
")",
":",
"# Find parameters a_tilde and b_tilde for initial redshift",
"# use Eqn 9 and 10 of Correa et al. (2015c)",
"a_tilde",
",",
"b_tilde",
"=",
"calc_ab",
"(",
"zi",
",",
"Mi",
",",
"*",
"*",
"cosmo",
")",
"# Halo mass at z, in Msol",
"# use Eqn 8 in Correa et al. (2015c)",
"Mz",
"=",
"Mi",
"*",
"(",
"(",
"1",
"+",
"z",
"-",
"zi",
")",
"**",
"a_tilde",
")",
"*",
"(",
"np",
".",
"exp",
"(",
"b_tilde",
"*",
"(",
"z",
"-",
"zi",
")",
")",
")",
"# Accretion rate at z, Msol yr^-1",
"# use Eqn 11 from Correa et al. (2015c)",
"dMdt",
"=",
"71.6",
"*",
"(",
"Mz",
"/",
"1e12",
")",
"*",
"(",
"cosmo",
"[",
"'h'",
"]",
"/",
"0.7",
")",
"*",
"(",
"-",
"a_tilde",
"/",
"(",
"1",
"+",
"z",
"-",
"zi",
")",
"-",
"b_tilde",
")",
"*",
"(",
"1",
"+",
"z",
")",
"*",
"np",
".",
"sqrt",
"(",
"cosmo",
"[",
"'omega_M_0'",
"]",
"*",
"(",
"1",
"+",
"z",
")",
"**",
"3",
"+",
"cosmo",
"[",
"'omega_lambda_0'",
"]",
")",
"return",
"(",
"dMdt",
",",
"Mz",
")"
] | Calculate accretion rate and mass history of a halo at any
redshift 'z' with mass 'Mi' at a lower redshift 'zi'
Parameters
----------
z : float
Redshift to solve acc_rate / mass history. Note zi<z
zi : float
Redshift
Mi : float
Halo mass at redshift 'zi'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(dMdt, Mz) : float
Accretion rate [Msol/yr], halo mass [Msol] at redshift 'z' | [
"Calculate",
"accretion",
"rate",
"and",
"mass",
"history",
"of",
"a",
"halo",
"at",
"any",
"redshift",
"z",
"with",
"mass",
"Mi",
"at",
"a",
"lower",
"redshift",
"z"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L400-L438 |
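Eqn 11 worked through with assumed inputs (the real a_tilde and b_tilde come from calc_ab):

import numpy as np
at, bt = 0.25, -0.75  # assumed growth indices for a ~1e12 Msol halo
h, om, ol = 0.7, 0.25, 0.75
z, zi, Mz = 0.0, 0.0, 1e12
dMdt = (71.6 * (Mz / 1e12) * (h / 0.7) * (-at / (1 + z - zi) - bt)
        * (1 + z) * np.sqrt(om * (1 + z)**3 + ol))
print(dMdt)  # ~36 Msol/yr onto a 1e12 Msol halo at z=0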
astroduff/commah | commah/commah.py | MAH | def MAH(z, zi, Mi, **cosmo):
""" Calculate mass accretion history by looping function acc_rate
over redshift steps 'z' for halo of mass 'Mi' at redshift 'zi'
Parameters
----------
z : float / numpy array
Redshift to output MAH over. Note zi<z always
zi : float
Redshift
Mi : float
Halo mass at redshift 'zi'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(dMdt, Mz) : float / numpy arrays of equivalent size to 'z'
Accretion rate [Msol/yr], halo mass [Msol] at redshift 'z'
"""
# Ensure that z is a 1D NumPy array
z = np.array(z, ndmin=1, dtype=float)
# Create a full array
dMdt_array = np.empty_like(z)
Mz_array = np.empty_like(z)
for i_ind, zval in enumerate(z):
# Solve the accretion rate and halo mass at each redshift step
dMdt, Mz = acc_rate(zval, zi, Mi, **cosmo)
dMdt_array[i_ind] = dMdt
Mz_array[i_ind] = Mz
return(dMdt_array, Mz_array) | python | def MAH(z, zi, Mi, **cosmo):
""" Calculate mass accretion history by looping function acc_rate
over redshift steps 'z' for halo of mass 'Mi' at redshift 'zi'
Parameters
----------
z : float / numpy array
Redshift to output MAH over. Note zi<z always
zi : float
Redshift
Mi : float
Halo mass at redshift 'zi'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(dMdt, Mz) : float / numpy arrays of equivalent size to 'z'
Accretion rate [Msol/yr], halo mass [Msol] at redshift 'z'
"""
# Ensure that z is a 1D NumPy array
z = np.array(z, ndmin=1, dtype=float)
# Create a full array
dMdt_array = np.empty_like(z)
Mz_array = np.empty_like(z)
for i_ind, zval in enumerate(z):
# Solve the accretion rate and halo mass at each redshift step
dMdt, Mz = acc_rate(zval, zi, Mi, **cosmo)
dMdt_array[i_ind] = dMdt
Mz_array[i_ind] = Mz
return(dMdt_array, Mz_array) | [
"def",
"MAH",
"(",
"z",
",",
"zi",
",",
"Mi",
",",
"*",
"*",
"cosmo",
")",
":",
"# Ensure that z is a 1D NumPy array",
"z",
"=",
"np",
".",
"array",
"(",
"z",
",",
"ndmin",
"=",
"1",
",",
"dtype",
"=",
"float",
")",
"# Create a full array",
"dMdt_array",
"=",
"np",
".",
"empty_like",
"(",
"z",
")",
"Mz_array",
"=",
"np",
".",
"empty_like",
"(",
"z",
")",
"for",
"i_ind",
",",
"zval",
"in",
"enumerate",
"(",
"z",
")",
":",
"# Solve the accretion rate and halo mass at each redshift step",
"dMdt",
",",
"Mz",
"=",
"acc_rate",
"(",
"zval",
",",
"zi",
",",
"Mi",
",",
"*",
"*",
"cosmo",
")",
"dMdt_array",
"[",
"i_ind",
"]",
"=",
"dMdt",
"Mz_array",
"[",
"i_ind",
"]",
"=",
"Mz",
"return",
"(",
"dMdt_array",
",",
"Mz_array",
")"
] | Calculate mass accretion history by looping function acc_rate
over redshift steps 'z' for halo of mass 'Mi' at redshift 'zi'
Parameters
----------
z : float / numpy array
Redshift to output MAH over. Note zi<z always
zi : float
Redshift
Mi : float
Halo mass at redshift 'zi'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(dMdt, Mz) : float / numpy arrays of equivalent size to 'z'
Accretion rate [Msol/yr], halo mass [Msol] at redshift 'z' | [
"Calculate",
"mass",
"accretion",
"history",
"by",
"looping",
"function",
"acc_rate",
"over",
"redshift",
"steps",
"z",
"for",
"halo",
"of",
"mass",
"Mi",
"at",
"redshift",
"zi"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L441-L480 |
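The Eqn 8 mass track that MAH loops over, sketched with the same assumed indices:

import numpy as np
at, bt = 0.25, -0.75                      # assumed; from calc_ab in practice
z = np.array([0.0, 1.0, 2.0, 4.0])
Mz = 1e12 * (1 + z)**at * np.exp(bt * z)  # Mi = 1e12 Msol at zi = 0
print(Mz / 1e12)  # ~[1.0, 0.56, 0.29, 0.07]: the progenitor mass history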
astroduff/commah | commah/commah.py | COM | def COM(z, M, **cosmo):
""" Calculate concentration for halo of mass 'M' at redshift 'z'
Parameters
----------
z : float / numpy array
Redshift to find concentration of halo
M : float / numpy array
Halo mass at redshift 'z'. Must be same size as 'z'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(c_array, sig_array, nu_array, zf_array) : float / numpy arrays
of equivalent size to 'z' and 'M'. Variables are
Concentration, Mass Variance 'sigma' this corresponds to,
the dimensionless fluctuation this represents and formation redshift
"""
# Check that z and M are arrays
z = np.array(z, ndmin=1, dtype=float)
M = np.array(M, ndmin=1, dtype=float)
# Create array
c_array = np.empty_like(z)
sig_array = np.empty_like(z)
nu_array = np.empty_like(z)
zf_array = np.empty_like(z)
for i_ind, (zval, Mval) in enumerate(_izip(z, M)):
# Evaluate the indices at each redshift and mass combination
# that you want a concentration for, different to MAH which
# uses one a_tilde and b_tilde at the starting redshift only
a_tilde, b_tilde = calc_ab(zval, Mval, **cosmo)
# Minimize equation to solve for 1 unknown, 'c'
c = scipy.optimize.brentq(_minimize_c, 2, 1000,
args=(zval, a_tilde, b_tilde,
cosmo['A_scaling'], cosmo['omega_M_0'],
cosmo['omega_lambda_0']))
if np.isclose(c, 0):
print("Error solving for concentration with given redshift and "
"(probably) too small a mass")
c = -1
sig = -1
nu = -1
zf = -1
else:
# Calculate formation redshift for this concentration,
# redshift at which the scale radius = virial radius: z_-2
zf = formationz(c, zval, Ascaling=cosmo['A_scaling'],
omega_M_0=cosmo['omega_M_0'],
omega_lambda_0=cosmo['omega_lambda_0'])
R_Mass = cp.perturbation.mass_to_radius(Mval, **cosmo)
sig, err_sig = cp.perturbation.sigma_r(R_Mass, 0, **cosmo)
nu = 1.686/(sig*growthfactor(zval, norm=True, **cosmo))
c_array[i_ind] = c
sig_array[i_ind] = sig
nu_array[i_ind] = nu
zf_array[i_ind] = zf
return(c_array, sig_array, nu_array, zf_array) | python | def COM(z, M, **cosmo):
""" Calculate concentration for halo of mass 'M' at redshift 'z'
Parameters
----------
z : float / numpy array
Redshift to find concentration of halo
M : float / numpy array
Halo mass at redshift 'z'. Must be same size as 'z'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(c_array, sig_array, nu_array, zf_array) : float / numpy arrays
of equivalent size to 'z' and 'M'. Variables are
Concentration, Mass Variance 'sigma' this corresponds to,
the dimensionless fluctuation this represents and formation redshift
"""
# Check that z and M are arrays
z = np.array(z, ndmin=1, dtype=float)
M = np.array(M, ndmin=1, dtype=float)
# Create array
c_array = np.empty_like(z)
sig_array = np.empty_like(z)
nu_array = np.empty_like(z)
zf_array = np.empty_like(z)
for i_ind, (zval, Mval) in enumerate(_izip(z, M)):
# Evaluate the indices at each redshift and mass combination
# that you want a concentration for, different to MAH which
# uses one a_tilde and b_tilde at the starting redshift only
a_tilde, b_tilde = calc_ab(zval, Mval, **cosmo)
# Minimize equation to solve for 1 unknown, 'c'
c = scipy.optimize.brentq(_minimize_c, 2, 1000,
args=(zval, a_tilde, b_tilde,
cosmo['A_scaling'], cosmo['omega_M_0'],
cosmo['omega_lambda_0']))
if np.isclose(c, 0):
print("Error solving for concentration with given redshift and "
"(probably) too small a mass")
c = -1
sig = -1
nu = -1
zf = -1
else:
# Calculate formation redshift for this concentration,
# redshift at which the scale radius = virial radius: z_-2
zf = formationz(c, zval, Ascaling=cosmo['A_scaling'],
omega_M_0=cosmo['omega_M_0'],
omega_lambda_0=cosmo['omega_lambda_0'])
R_Mass = cp.perturbation.mass_to_radius(Mval, **cosmo)
sig, err_sig = cp.perturbation.sigma_r(R_Mass, 0, **cosmo)
nu = 1.686/(sig*growthfactor(zval, norm=True, **cosmo))
c_array[i_ind] = c
sig_array[i_ind] = sig
nu_array[i_ind] = nu
zf_array[i_ind] = zf
return(c_array, sig_array, nu_array, zf_array) | [
"def",
"COM",
"(",
"z",
",",
"M",
",",
"*",
"*",
"cosmo",
")",
":",
"# Check that z and M are arrays",
"z",
"=",
"np",
".",
"array",
"(",
"z",
",",
"ndmin",
"=",
"1",
",",
"dtype",
"=",
"float",
")",
"M",
"=",
"np",
".",
"array",
"(",
"M",
",",
"ndmin",
"=",
"1",
",",
"dtype",
"=",
"float",
")",
"# Create array",
"c_array",
"=",
"np",
".",
"empty_like",
"(",
"z",
")",
"sig_array",
"=",
"np",
".",
"empty_like",
"(",
"z",
")",
"nu_array",
"=",
"np",
".",
"empty_like",
"(",
"z",
")",
"zf_array",
"=",
"np",
".",
"empty_like",
"(",
"z",
")",
"for",
"i_ind",
",",
"(",
"zval",
",",
"Mval",
")",
"in",
"enumerate",
"(",
"_izip",
"(",
"z",
",",
"M",
")",
")",
":",
"# Evaluate the indices at each redshift and mass combination",
"# that you want a concentration for, different to MAH which",
"# uses one a_tilde and b_tilde at the starting redshift only",
"a_tilde",
",",
"b_tilde",
"=",
"calc_ab",
"(",
"zval",
",",
"Mval",
",",
"*",
"*",
"cosmo",
")",
"# Minimize equation to solve for 1 unknown, 'c'",
"c",
"=",
"scipy",
".",
"optimize",
".",
"brentq",
"(",
"_minimize_c",
",",
"2",
",",
"1000",
",",
"args",
"=",
"(",
"zval",
",",
"a_tilde",
",",
"b_tilde",
",",
"cosmo",
"[",
"'A_scaling'",
"]",
",",
"cosmo",
"[",
"'omega_M_0'",
"]",
",",
"cosmo",
"[",
"'omega_lambda_0'",
"]",
")",
")",
"if",
"np",
".",
"isclose",
"(",
"c",
",",
"0",
")",
":",
"print",
"(",
"\"Error solving for concentration with given redshift and \"",
"\"(probably) too small a mass\"",
")",
"c",
"=",
"-",
"1",
"sig",
"=",
"-",
"1",
"nu",
"=",
"-",
"1",
"zf",
"=",
"-",
"1",
"else",
":",
"# Calculate formation redshift for this concentration,",
"# redshift at which the scale radius = virial radius: z_-2",
"zf",
"=",
"formationz",
"(",
"c",
",",
"zval",
",",
"Ascaling",
"=",
"cosmo",
"[",
"'A_scaling'",
"]",
",",
"omega_M_0",
"=",
"cosmo",
"[",
"'omega_M_0'",
"]",
",",
"omega_lambda_0",
"=",
"cosmo",
"[",
"'omega_lambda_0'",
"]",
")",
"R_Mass",
"=",
"cp",
".",
"perturbation",
".",
"mass_to_radius",
"(",
"Mval",
",",
"*",
"*",
"cosmo",
")",
"sig",
",",
"err_sig",
"=",
"cp",
".",
"perturbation",
".",
"sigma_r",
"(",
"R_Mass",
",",
"0",
",",
"*",
"*",
"cosmo",
")",
"nu",
"=",
"1.686",
"/",
"(",
"sig",
"*",
"growthfactor",
"(",
"zval",
",",
"norm",
"=",
"True",
",",
"*",
"*",
"cosmo",
")",
")",
"c_array",
"[",
"i_ind",
"]",
"=",
"c",
"sig_array",
"[",
"i_ind",
"]",
"=",
"sig",
"nu_array",
"[",
"i_ind",
"]",
"=",
"nu",
"zf_array",
"[",
"i_ind",
"]",
"=",
"zf",
"return",
"(",
"c_array",
",",
"sig_array",
",",
"nu_array",
",",
"zf_array",
")"
] | Calculate concentration for halo of mass 'M' at redshift 'z'
Parameters
----------
z : float / numpy array
Redshift to find concentration of halo
M : float / numpy array
Halo mass at redshift 'z'. Must be same size as 'z'
cosmo : dict
Dictionary of cosmological parameters, similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
Returns
-------
(c_array, sig_array, nu_array, zf_array) : float / numpy arrays
of equivalent size to 'z' and 'M'. Variables are
Concentration, Mass Variance 'sigma' this corresponds to,
the dimensionless fluctuation this represents and formation redshift | [
"Calculate",
"concentration",
"for",
"halo",
"of",
"mass",
"M",
"at",
"redshift",
"z"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L483-L552 |
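All of the above is wrapped by the run() entry point documented next; a usage sketch, assuming the commah package and its cosmolopy dependency are installed:

import commah
output = commah.run('WMAP5', zi=0.0, Mi=1e12, z=[0.0, 1.0, 2.0])
print(output['c'].flatten())     # NFW concentrations at z = 0, 1, 2
print(output['dMdt'].flatten())  # accretion rates [Msol/yr]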
astroduff/commah | commah/commah.py | run | def run(cosmology, zi=0, Mi=1e12, z=False, com=True, mah=True,
filename=None, verbose=None, retcosmo=None):
""" Run commah code on halo of mass 'Mi' at redshift 'zi' with
accretion and profile history at higher redshifts 'z'
This is based on Correa et al. (2015a,b,c)
Parameters
----------
cosmology : str or dict
Can be named cosmology, default WMAP7 (aka DRAGONS), or
DRAGONS, WMAP1, WMAP3, WMAP5, WMAP7, WMAP9, Planck13, Planck15
or dictionary similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
zi : float / numpy array, optional
Redshift at which halo has mass 'Mi'. If float then all
halo masses 'Mi' are assumed to be at this redshift.
If array but Mi is float, then this halo mass is used across
all starting redshifts. If both Mi and zi are arrays then they
have to be the same size for one-to-one correspondence between
halo mass and the redshift at which it has that mass. Default is 0.
Mi : float / numpy array, optional
Halo mass 'Mi' at a redshift 'zi'. If float then all redshifts 'zi'
are solved for this halo mass. If array but zi is float, then this
redshift is applied to all halo masses. If both Mi and zi are
arrays then they have to be the same size for one-to-one
correspondence between halo mass and the redshift at which it
has that mass. Default is 1e12 Msol.
z : float / numpy array, optional
Redshift to solve commah code at. Must have zi<z else these steps
are skipped. Default is False, meaning commah is solved at z=zi
com : bool, optional
If true then solve for concentration-mass,
default is True.
mah : bool, optional
If true then solve for accretion rate and halo mass history,
default is True.
filename : bool / str, optional
If str is passed this is used as a filename for output of commah
verbose : bool, optional
If true then give comments, default is None.
retcosmo : bool, optional
Return cosmological parameters used as a dict if retcosmo = True,
default is None.
Returns
-------
dataset : structured dataset
dataset contains structured columns of size
(size(Mi) > size(z)) by size(z)
If mah = True and com = False then columns are
('zi',float),('Mi',float),('z',float),('dMdt',float),('Mz',float)
where 'zi' is the starting redshift, 'Mi' is halo mass at zi
'z' is output redshift (NB z>zi), 'dMdt' is accretion rate [Msol/yr]
and 'Mz' is the halo mass at 'z' for a halo which was 'Mi' massive
at starting redshift 'zi'
If mah = False and com = True then columns are
('zi',float),('Mi',float),('z',float),('c',float),('sig',float),('nu',float),('zf',float)
where 'zi' is the starting redshift, 'Mi' is halo mass at zi
'z' is output redshift (NB z>zi), 'c' is NFW concentration of halo
at the redshift 'z', 'sig' is the mass variance 'sigma',
'nu' is the dimensionless fluctuation for halo mass 'Mi' at 'zi',
'zf' is the formation redshift for a halo of mass 'Mi' at redshift 'zi'
If mah = True and com = True then columns are:
('zi',float),('Mi',float),('z',float),('dMdt',float),('Mz',float),
('c',float),('sig',float),('nu',float),('zf',float)
file : structured dataset with name 'filename' if passed
Raises
------
Output -1
If com = False and mah = False as user has to select something.
Output -1
If 'zi' and 'Mi' are arrays of unequal size. Impossible to match
corresponding masses and redshifts of output.
Examples
--------
Examples should be written in doctest format, and should illustrate how
to use the function.
>>> import examples
>>> examples.runcommands() # A series of ways to query structured dataset
>>> examples.plotcommands() # Examples to plot data
"""
# Check user choices...
if not com and not mah:
print("User has to choose com=True and / or mah=True ")
return(-1)
# Convert arrays / lists to np.array
# and inflate redshift / mass axis
# to match each other for later loop
results = _checkinput(zi, Mi, z=z, verbose=verbose)
# Return if results is -1
if(results == -1):
return(-1)
# If not, unpack the returned iterable
else:
zi, Mi, z, lenz, lenm, lenzout = results
# At this point we will have lenm objects to iterate over
# Get the cosmological parameters for the given cosmology
cosmo = getcosmo(cosmology)
# Create output file if desired
if filename:
print("Output to file %r" % (filename))
fout = open(filename, 'w')
# Create the structured dataset
try:
if mah and com:
if verbose:
print("Output requested is zi, Mi, z, dMdt, Mz, c, sig, nu, "
"zf")
if filename:
fout.write(_getcosmoheader(cosmo)+'\n')
fout.write("# Initial z - Initial Halo - Output z - "
" Accretion - Final Halo - concentration - "
" Mass - Peak - Formation z "+'\n')
fout.write("# - mass - -"
" rate - mass - - "
" Variance - Height - "+'\n')
fout.write("# - (M200) - - "
" (dM/dt) - (M200) - - "
" (sigma) - (nu) - "+'\n')
fout.write("# - [Msol] - - "
" [Msol/yr] - [Msol] - - "
" - - "+'\n')
dataset = np.zeros((lenm, lenzout), dtype=[('zi', float),
('Mi', float), ('z', float), ('dMdt', float),
('Mz', float), ('c', float), ('sig', float),
('nu', float), ('zf', float)])
elif mah:
if verbose:
print("Output requested is zi, Mi, z, dMdt, Mz")
if filename:
fout.write(_getcosmoheader(cosmo)+'\n')
fout.write("# Initial z - Initial Halo - Output z -"
" Accretion - Final Halo "+'\n')
fout.write("# - mass - -"
" rate - mass "+'\n')
fout.write("# - (M200) - -"
" (dM/dt) - (M200) "+'\n')
fout.write("# - [Msol] - -"
" [Msol/yr] - [Msol] "+'\n')
dataset = np.zeros((lenm, lenzout), dtype=[('zi', float),
('Mi', float), ('z', float),
('dMdt', float), ('Mz', float)])
else:
if verbose:
print("Output requested is zi, Mi, z, c, sig, nu, zf")
if filename:
fout.write(_getcosmoheader(cosmo)+'\n')
fout.write("# Initial z - Initial Halo - Output z - "
" concentration - "
" Mass - Peak - Formation z "+'\n')
fout.write("# - mass - -"
" -"
" Variance - Height - "+'\n')
fout.write("# - (M200) - - "
" - "
" (sigma) - (nu) - "+'\n')
fout.write("# - [Msol] - - "
" - "
" - - "+'\n')
dataset = np.zeros((lenm, lenzout), dtype=[('zi', float),
('Mi', float), ('z', float), ('c', float),
('sig', float), ('nu', float), ('zf', float)])
# Now loop over the combination of initial redshift and halo mass
for i_ind, (zval, Mval) in enumerate(_izip(zi, Mi)):
if verbose:
print("Output Halo of Mass Mi=%s at zi=%s" % (Mval, zval))
# For a given halo mass Mi at redshift zi need to know
# output redshifts 'z'
# Check that all requested redshifts are greater than
# input redshift, except if z is False, in which case
# only solve z at zi, i.e. remove a loop
if z is False:
ztemp = np.array(zval, ndmin=1, dtype=float)
else:
ztemp = np.array(z[z >= zval], dtype=float)
# Loop over the output redshifts
if ztemp.size:
# Return accretion rates and halo mass progenitors at
# redshifts 'z' for object of mass Mi at zi
dMdt, Mz = MAH(ztemp, zval, Mval, **cosmo)
if mah and com:
# More expensive to return concentrations
c, sig, nu, zf = COM(ztemp, Mz, **cosmo)
# Save all arrays
for j_ind, j_val in enumerate(ztemp):
dataset[i_ind, j_ind] =\
(zval, Mval, ztemp[j_ind], dMdt[j_ind], Mz[j_ind],
c[j_ind], sig[j_ind], nu[j_ind], zf[j_ind])
if filename:
fout.write(
"{}, {}, {}, {}, {}, {}, {}, {}, {} \n".format(
zval, Mval, ztemp[j_ind], dMdt[j_ind],
Mz[j_ind], c[j_ind], sig[j_ind], nu[j_ind],
zf[j_ind]))
elif mah:
# Save only MAH arrays
for j_ind, j_val in enumerate(ztemp):
dataset[i_ind, j_ind] =\
(zval, Mval, ztemp[j_ind], dMdt[j_ind], Mz[j_ind])
if filename:
fout.write("{}, {}, {}, {}, {} \n".format(
zval, Mval, ztemp[j_ind], dMdt[j_ind],
Mz[j_ind]))
else:
# Output only COM arrays
c, sig, nu, zf = COM(ztemp, Mz, **cosmo)
# For any halo mass Mi at redshift zi
# solve for c, sig, nu and zf
for j_ind, j_val in enumerate(ztemp):
dataset[i_ind, j_ind] =\
(zval, Mval, ztemp[j_ind], c[j_ind], sig[j_ind],
nu[j_ind], zf[j_ind])
if filename:
fout.write("{}, {}, {}, {}, {}, {}, {} \n".format(
zval, Mval, ztemp[j_ind], c[j_ind], sig[j_ind],
nu[j_ind], zf[j_ind]))
# Make sure to close the file if it was opened
finally:
fout.close() if filename else None
if retcosmo:
return(dataset, cosmo)
else:
return(dataset) | python | def run(cosmology, zi=0, Mi=1e12, z=False, com=True, mah=True,
filename=None, verbose=None, retcosmo=None):
""" Run commah code on halo of mass 'Mi' at redshift 'zi' with
accretion and profile history at higher redshifts 'z'
This is based on Correa et al. (2015a,b,c)
Parameters
----------
cosmology : str or dict
Can be named cosmology, default WMAP7 (aka DRAGONS), or
DRAGONS, WMAP1, WMAP3, WMAP5, WMAP7, WMAP9, Planck13, Planck15
or dictionary similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
zi : float / numpy array, optional
Redshift at which halo has mass 'Mi'. If float then all
halo masses 'Mi' are assumed to be at this redshift.
If array but Mi is float, then this halo mass is used across
all starting redshifts. If both Mi and zi are arrays then they
have to be the same size for one-to-one correspondence between
halo mass and the redshift at which it has that mass. Default is 0.
Mi : float / numpy array, optional
Halo mass 'Mi' at a redshift 'zi'. If float then all redshifts 'zi'
are solved for this halo mass. If array but zi is float, then this
redshift is applied to all halo masses. If both Mi and zi are
arrays then they have to be the same size for one-to-one
correspondence between halo mass and the redshift at which it
has that mass. Default is 1e12 Msol.
z : float / numpy array, optional
Redshift(s) to solve commah code at. Must satisfy z >= zi; any
smaller values are skipped. Default is False, meaning commah is solved at z=zi
com : bool, optional
If true then solve for concentration-mass,
default is True.
mah : bool, optional
If true then solve for accretion rate and halo mass history,
default is True.
filename : bool / str, optional
If str is passed this is used as a filename for output of commah
verbose : bool, optional
If true then give comments, default is None.
retcosmo : bool, optional
Return cosmological parameters used as a dict if retcosmo = True,
default is None.
Returns
-------
dataset : structured dataset
dataset contains structured columns of size
size(Mi) by size(z)
If mah = True and com = False then columns are
('zi',float),('Mi',float),('z',float),('dMdt',float),('Mz',float)
where 'zi' is the starting redshift, 'Mi' is halo mass at zi
'z' is output redshift (NB z>zi), 'dMdt' is accretion rate [Msol/yr]
and 'Mz' is the halo mass at 'z' for a halo that had mass 'Mi'
at starting redshift 'zi'
If mah = False and com = True then columns are
('zi',float),('Mi',float),('z',float),('c',float),('sig',float),('nu',float),('zf',float)
where 'zi' is the starting redshift, 'Mi' is halo mass at zi
'z' is output redshift (NB z>zi), 'c' is NFW concentration of halo
at the redshift 'z', 'sig' is the mass variance 'sigma',
'nu' is the dimensionless fluctuation for halo mass 'Mi' at 'zi',
'zf' is the formation redshift for a halo of mass 'Mi' at redshift 'zi'
If mah = True and com = True then columns are:
('zi',float),('Mi',float),('z',float),('dMdt',float),('Mz',float),
('c',float),('sig',float),('nu',float),('zf',float)
file : structured dataset with name 'filename' if passed
Raises
------
Output -1
If com = False and mah = False, as the user has to select something.
Output -1
If 'zi' and 'Mi' are arrays of unequal size. Impossible to match
corresponding masses and redshifts of output.
Examples
--------
Examples should be written in doctest format, and should illustrate how
to use the function.
>>> import examples
>>> examples.runcommands() # A series of ways to query structured dataset
>>> examples.plotcommands() # Examples to plot data
"""
# Check user choices...
if not com and not mah:
print("User has to choose com=True and / or mah=True ")
return(-1)
# Convert arrays / lists to np.array
# and inflate redshift / mass axis
# to match each other for later loop
results = _checkinput(zi, Mi, z=z, verbose=verbose)
# Return if results is -1
if(results == -1):
return(-1)
# If not, unpack the returned iterable
else:
zi, Mi, z, lenz, lenm, lenzout = results
# At this point we will have lenm objects to iterate over
# Get the cosmological parameters for the given cosmology
cosmo = getcosmo(cosmology)
# Create output file if desired
if filename:
print("Output to file %r" % (filename))
fout = open(filename, 'w')
# Create the structured dataset
try:
if mah and com:
if verbose:
print("Output requested is zi, Mi, z, dMdt, Mz, c, sig, nu, "
"zf")
if filename:
fout.write(_getcosmoheader(cosmo)+'\n')
fout.write("# Initial z - Initial Halo - Output z - "
" Accretion - Final Halo - concentration - "
" Mass - Peak - Formation z "+'\n')
fout.write("# - mass - -"
" rate - mass - - "
" Variance - Height - "+'\n')
fout.write("# - (M200) - - "
" (dM/dt) - (M200) - - "
" (sigma) - (nu) - "+'\n')
fout.write("# - [Msol] - - "
" [Msol/yr] - [Msol] - - "
" - - "+'\n')
dataset = np.zeros((lenm, lenzout), dtype=[('zi', float),
('Mi', float), ('z', float), ('dMdt', float),
('Mz', float), ('c', float), ('sig', float),
('nu', float), ('zf', float)])
elif mah:
if verbose:
print("Output requested is zi, Mi, z, dMdt, Mz")
if filename:
fout.write(_getcosmoheader(cosmo)+'\n')
fout.write("# Initial z - Initial Halo - Output z -"
" Accretion - Final Halo "+'\n')
fout.write("# - mass - -"
" rate - mass "+'\n')
fout.write("# - (M200) - -"
" (dM/dt) - (M200) "+'\n')
fout.write("# - [Msol] - -"
" [Msol/yr] - [Msol] "+'\n')
dataset = np.zeros((lenm, lenzout), dtype=[('zi', float),
('Mi', float), ('z', float),
('dMdt', float), ('Mz', float)])
else:
if verbose:
print("Output requested is zi, Mi, z, c, sig, nu, zf")
if filename:
fout.write(_getcosmoheader(cosmo)+'\n')
fout.write("# Initial z - Initial Halo - Output z - "
" concentration - "
" Mass - Peak - Formation z "+'\n')
fout.write("# - mass - -"
" -"
" Variance - Height - "+'\n')
fout.write("# - (M200) - - "
" - "
" (sigma) - (nu) - "+'\n')
fout.write("# - [Msol] - - "
" - "
" - - "+'\n')
dataset = np.zeros((lenm, lenzout), dtype=[('zi', float),
('Mi', float), ('z', float), ('c', float),
('sig', float), ('nu', float), ('zf', float)])
# Now loop over the combination of initial redshift and halo mass
for i_ind, (zval, Mval) in enumerate(_izip(zi, Mi)):
if verbose:
print("Output Halo of Mass Mi=%s at zi=%s" % (Mval, zval))
# For a given halo mass Mi at redshift zi need to know
# output redshifts 'z'
# Check that all requested redshifts are greater than
# input redshift, except if z is False, in which case
# only solve z at zi, i.e. remove a loop
if z is False:
ztemp = np.array(zval, ndmin=1, dtype=float)
else:
ztemp = np.array(z[z >= zval], dtype=float)
# Loop over the output redshifts
if ztemp.size:
# Return accretion rates and halo mass progenitors at
# redshifts 'z' for object of mass Mi at zi
dMdt, Mz = MAH(ztemp, zval, Mval, **cosmo)
if mah and com:
# More expensive to return concentrations
c, sig, nu, zf = COM(ztemp, Mz, **cosmo)
# Save all arrays
for j_ind, j_val in enumerate(ztemp):
dataset[i_ind, j_ind] =\
(zval, Mval, ztemp[j_ind], dMdt[j_ind], Mz[j_ind],
c[j_ind], sig[j_ind], nu[j_ind], zf[j_ind])
if filename:
fout.write(
"{}, {}, {}, {}, {}, {}, {}, {}, {} \n".format(
zval, Mval, ztemp[j_ind], dMdt[j_ind],
Mz[j_ind], c[j_ind], sig[j_ind], nu[j_ind],
zf[j_ind]))
elif mah:
# Save only MAH arrays
for j_ind, j_val in enumerate(ztemp):
dataset[i_ind, j_ind] =\
(zval, Mval, ztemp[j_ind], dMdt[j_ind], Mz[j_ind])
if filename:
fout.write("{}, {}, {}, {}, {} \n".format(
zval, Mval, ztemp[j_ind], dMdt[j_ind],
Mz[j_ind]))
else:
# Output only COM arrays
c, sig, nu, zf = COM(ztemp, Mz, **cosmo)
# For any halo mass Mi at redshift zi
# solve for c, sig, nu and zf
for j_ind, j_val in enumerate(ztemp):
dataset[i_ind, j_ind] =\
(zval, Mval, ztemp[j_ind], c[j_ind], sig[j_ind],
nu[j_ind], zf[j_ind])
if filename:
fout.write("{}, {}, {}, {}, {}, {}, {} \n".format(
zval, Mval, ztemp[j_ind], c[j_ind], sig[j_ind],
nu[j_ind], zf[j_ind]))
# Make sure to close the file if it was opened
finally:
fout.close() if filename else None
if retcosmo:
return(dataset, cosmo)
else:
return(dataset) | [
"def",
"run",
"(",
"cosmology",
",",
"zi",
"=",
"0",
",",
"Mi",
"=",
"1e12",
",",
"z",
"=",
"False",
",",
"com",
"=",
"True",
",",
"mah",
"=",
"True",
",",
"filename",
"=",
"None",
",",
"verbose",
"=",
"None",
",",
"retcosmo",
"=",
"None",
")",
":",
"# Check user choices...",
"if",
"not",
"com",
"and",
"not",
"mah",
":",
"print",
"(",
"\"User has to choose com=True and / or mah=True \"",
")",
"return",
"(",
"-",
"1",
")",
"# Convert arrays / lists to np.array",
"# and inflate redshift / mass axis",
"# to match each other for later loop",
"results",
"=",
"_checkinput",
"(",
"zi",
",",
"Mi",
",",
"z",
"=",
"z",
",",
"verbose",
"=",
"verbose",
")",
"# Return if results is -1",
"if",
"(",
"results",
"==",
"-",
"1",
")",
":",
"return",
"(",
"-",
"1",
")",
"# If not, unpack the returned iterable",
"else",
":",
"zi",
",",
"Mi",
",",
"z",
",",
"lenz",
",",
"lenm",
",",
"lenzout",
"=",
"results",
"# At this point we will have lenm objects to iterate over",
"# Get the cosmological parameters for the given cosmology",
"cosmo",
"=",
"getcosmo",
"(",
"cosmology",
")",
"# Create output file if desired",
"if",
"filename",
":",
"print",
"(",
"\"Output to file %r\"",
"%",
"(",
"filename",
")",
")",
"fout",
"=",
"open",
"(",
"filename",
",",
"'wb'",
")",
"# Create the structured dataset",
"try",
":",
"if",
"mah",
"and",
"com",
":",
"if",
"verbose",
":",
"print",
"(",
"\"Output requested is zi, Mi, z, dMdt, Mz, c, sig, nu, \"",
"\"zf\"",
")",
"if",
"filename",
":",
"fout",
".",
"write",
"(",
"_getcosmoheader",
"(",
"cosmo",
")",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# Initial z - Initial Halo - Output z - \"",
"\" Accretion - Final Halo - concentration - \"",
"\" Mass - Peak - Formation z \"",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# - mass - -\"",
"\" rate - mass - - \"",
"\" Variance - Height - \"",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# - (M200) - - \"",
"\" (dM/dt) - (M200) - - \"",
"\" (sigma) - (nu) - \"",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# - [Msol] - - \"",
"\" [Msol/yr] - [Msol] - - \"",
"\" - - \"",
"+",
"'\\n'",
")",
"dataset",
"=",
"np",
".",
"zeros",
"(",
"(",
"lenm",
",",
"lenzout",
")",
",",
"dtype",
"=",
"[",
"(",
"'zi'",
",",
"float",
")",
",",
"(",
"'Mi'",
",",
"float",
")",
",",
"(",
"'z'",
",",
"float",
")",
",",
"(",
"'dMdt'",
",",
"float",
")",
",",
"(",
"'Mz'",
",",
"float",
")",
",",
"(",
"'c'",
",",
"float",
")",
",",
"(",
"'sig'",
",",
"float",
")",
",",
"(",
"'nu'",
",",
"float",
")",
",",
"(",
"'zf'",
",",
"float",
")",
"]",
")",
"elif",
"mah",
":",
"if",
"verbose",
":",
"print",
"(",
"\"Output requested is zi, Mi, z, dMdt, Mz\"",
")",
"if",
"filename",
":",
"fout",
".",
"write",
"(",
"_getcosmoheader",
"(",
"cosmo",
")",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# Initial z - Initial Halo - Output z -\"",
"\" Accretion - Final Halo \"",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# - mass - -\"",
"\" rate - mass \"",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# - (M200) - -\"",
"\" (dm/dt) - (M200) \"",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# - [Msol] - -\"",
"\" [Msol/yr] - [Msol] \"",
"+",
"'\\n'",
")",
"dataset",
"=",
"np",
".",
"zeros",
"(",
"(",
"lenm",
",",
"lenzout",
")",
",",
"dtype",
"=",
"[",
"(",
"'zi'",
",",
"float",
")",
",",
"(",
"'Mi'",
",",
"float",
")",
",",
"(",
"'z'",
",",
"float",
")",
",",
"(",
"'dMdt'",
",",
"float",
")",
",",
"(",
"'Mz'",
",",
"float",
")",
"]",
")",
"else",
":",
"if",
"verbose",
":",
"print",
"(",
"\"Output requested is zi, Mi, z, c, sig, nu, zf\"",
")",
"if",
"filename",
":",
"fout",
".",
"write",
"(",
"_getcosmoheader",
"(",
"cosmo",
")",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# Initial z - Initial Halo - Output z - \"",
"\" concentration - \"",
"\" Mass - Peak - Formation z \"",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# - mass - -\"",
"\" -\"",
"\" Variance - Height - \"",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# - (M200) - - \"",
"\" - \"",
"\" (sigma) - (nu) - \"",
"+",
"'\\n'",
")",
"fout",
".",
"write",
"(",
"\"# - [Msol] - - \"",
"\" - \"",
"\" - - \"",
"+",
"'\\n'",
")",
"dataset",
"=",
"np",
".",
"zeros",
"(",
"(",
"lenm",
",",
"lenzout",
")",
",",
"dtype",
"=",
"[",
"(",
"'zi'",
",",
"float",
")",
",",
"(",
"'Mi'",
",",
"float",
")",
",",
"(",
"'z'",
",",
"float",
")",
",",
"(",
"'c'",
",",
"float",
")",
",",
"(",
"'sig'",
",",
"float",
")",
",",
"(",
"'nu'",
",",
"float",
")",
",",
"(",
"'zf'",
",",
"float",
")",
"]",
")",
"# Now loop over the combination of initial redshift and halo mamss",
"for",
"i_ind",
",",
"(",
"zval",
",",
"Mval",
")",
"in",
"enumerate",
"(",
"_izip",
"(",
"zi",
",",
"Mi",
")",
")",
":",
"if",
"verbose",
":",
"print",
"(",
"\"Output Halo of Mass Mi=%s at zi=%s\"",
"%",
"(",
"Mval",
",",
"zval",
")",
")",
"# For a given halo mass Mi at redshift zi need to know",
"# output redshifts 'z'",
"# Check that all requested redshifts are greater than",
"# input redshift, except if z is False, in which case",
"# only solve z at zi, i.e. remove a loop",
"if",
"z",
"is",
"False",
":",
"ztemp",
"=",
"np",
".",
"array",
"(",
"zval",
",",
"ndmin",
"=",
"1",
",",
"dtype",
"=",
"float",
")",
"else",
":",
"ztemp",
"=",
"np",
".",
"array",
"(",
"z",
"[",
"z",
">=",
"zval",
"]",
",",
"dtype",
"=",
"float",
")",
"# Loop over the output redshifts",
"if",
"ztemp",
".",
"size",
":",
"# Return accretion rates and halo mass progenitors at",
"# redshifts 'z' for object of mass Mi at zi",
"dMdt",
",",
"Mz",
"=",
"MAH",
"(",
"ztemp",
",",
"zval",
",",
"Mval",
",",
"*",
"*",
"cosmo",
")",
"if",
"mah",
"and",
"com",
":",
"# More expensive to return concentrations",
"c",
",",
"sig",
",",
"nu",
",",
"zf",
"=",
"COM",
"(",
"ztemp",
",",
"Mz",
",",
"*",
"*",
"cosmo",
")",
"# Save all arrays",
"for",
"j_ind",
",",
"j_val",
"in",
"enumerate",
"(",
"ztemp",
")",
":",
"dataset",
"[",
"i_ind",
",",
"j_ind",
"]",
"=",
"(",
"zval",
",",
"Mval",
",",
"ztemp",
"[",
"j_ind",
"]",
",",
"dMdt",
"[",
"j_ind",
"]",
",",
"Mz",
"[",
"j_ind",
"]",
",",
"c",
"[",
"j_ind",
"]",
",",
"sig",
"[",
"j_ind",
"]",
",",
"nu",
"[",
"j_ind",
"]",
",",
"zf",
"[",
"j_ind",
"]",
")",
"if",
"filename",
":",
"fout",
".",
"write",
"(",
"\"{}, {}, {}, {}, {}, {}, {}, {}, {} \\n\"",
".",
"format",
"(",
"zval",
",",
"Mval",
",",
"ztemp",
"[",
"j_ind",
"]",
",",
"dMdt",
"[",
"j_ind",
"]",
",",
"Mz",
"[",
"j_ind",
"]",
",",
"c",
"[",
"j_ind",
"]",
",",
"sig",
"[",
"j_ind",
"]",
",",
"nu",
"[",
"j_ind",
"]",
",",
"zf",
"[",
"j_ind",
"]",
")",
")",
"elif",
"mah",
":",
"# Save only MAH arrays",
"for",
"j_ind",
",",
"j_val",
"in",
"enumerate",
"(",
"ztemp",
")",
":",
"dataset",
"[",
"i_ind",
",",
"j_ind",
"]",
"=",
"(",
"zval",
",",
"Mval",
",",
"ztemp",
"[",
"j_ind",
"]",
",",
"dMdt",
"[",
"j_ind",
"]",
",",
"Mz",
"[",
"j_ind",
"]",
")",
"if",
"filename",
":",
"fout",
".",
"write",
"(",
"\"{}, {}, {}, {}, {} \\n\"",
".",
"format",
"(",
"zval",
",",
"Mval",
",",
"ztemp",
"[",
"j_ind",
"]",
",",
"dMdt",
"[",
"j_ind",
"]",
",",
"Mz",
"[",
"j_ind",
"]",
")",
")",
"else",
":",
"# Output only COM arrays",
"c",
",",
"sig",
",",
"nu",
",",
"zf",
"=",
"COM",
"(",
"ztemp",
",",
"Mz",
",",
"*",
"*",
"cosmo",
")",
"# For any halo mass Mi at redshift zi",
"# solve for c, sig, nu and zf",
"for",
"j_ind",
",",
"j_val",
"in",
"enumerate",
"(",
"ztemp",
")",
":",
"dataset",
"[",
"i_ind",
",",
"j_ind",
"]",
"=",
"(",
"zval",
",",
"Mval",
",",
"ztemp",
"[",
"j_ind",
"]",
",",
"c",
"[",
"j_ind",
"]",
",",
"sig",
"[",
"j_ind",
"]",
",",
"nu",
"[",
"j_ind",
"]",
",",
"zf",
"[",
"j_ind",
"]",
")",
"if",
"filename",
":",
"fout",
".",
"write",
"(",
"\"{}, {}, {}, {}, {}, {}, {} \\n\"",
".",
"format",
"(",
"zval",
",",
"Mval",
",",
"ztemp",
"[",
"j_ind",
"]",
",",
"c",
"[",
"j_ind",
"]",
",",
"sig",
"[",
"j_ind",
"]",
",",
"nu",
"[",
"j_ind",
"]",
",",
"zf",
"[",
"j_ind",
"]",
")",
")",
"# Make sure to close the file if it was opened",
"finally",
":",
"fout",
".",
"close",
"(",
")",
"if",
"filename",
"else",
"None",
"if",
"retcosmo",
":",
"return",
"(",
"dataset",
",",
"cosmo",
")",
"else",
":",
"return",
"(",
"dataset",
")"
] | Run commah code on halo of mass 'Mi' at redshift 'zi' with
accretion and profile history at higher redshifts 'z'.
This is based on Correa et al. (2015a,b,c)
Parameters
----------
cosmology : str or dict
Can be named cosmology, default WMAP7 (aka DRAGONS), or
DRAGONS, WMAP1, WMAP3, WMAP5, WMAP7, WMAP9, Planck13, Planck15
or dictionary similar in format to:
{'N_nu': 0,'Y_He': 0.24, 'h': 0.702, 'n': 0.963,'omega_M_0': 0.275,
'omega_b_0': 0.0458,'omega_lambda_0': 0.725,'omega_n_0': 0.0,
'sigma_8': 0.816, 't_0': 13.76, 'tau': 0.088,'z_reion': 10.6}
zi : float / numpy array, optional
Redshift at which halo has mass 'Mi'. If float then all
halo masses 'Mi' are assumed to be at this redshift.
If array but Mi is float, then this halo mass is used across
all starting redshifts. If both Mi and zi are arrays then they
have to be the same size for one-to-one correspondence between
halo mass and the redshift at which it has that mass. Default is 0.
Mi : float / numpy array, optional
Halo mass 'Mi' at a redshift 'zi'. If float then all redshifts 'zi'
are solved for this halo mass. If array but zi is float, then this
redshift is applied to all halo masses. If both Mi and zi are
arrays then they have to be the same size for one-to-one
correspondence between halo mass and the redshift at which it
has that mass. Default is 1e12 Msol.
z : float / numpy array, optional
Redshift(s) to solve commah code at. Must satisfy z >= zi; any
smaller values are skipped. Default is False, meaning commah is solved at z=zi
com : bool, optional
If true then solve for concentration-mass,
default is True.
mah : bool, optional
If true then solve for accretion rate and halo mass history,
default is True.
filename : bool / str, optional
If str is passed this is used as a filename for output of commah
verbose : bool, optional
If true then give comments, default is None.
retcosmo : bool, optional
Return cosmological parameters used as a dict if retcosmo = True,
default is None.
Returns
-------
dataset : structured dataset
dataset contains structured columns of size
size(Mi) by size(z)
If mah = True and com = False then columns are
('zi',float),('Mi',float),('z',float),('dMdt',float),('Mz',float)
where 'zi' is the starting redshift, 'Mi' is halo mass at zi
'z' is output redshift (NB z>zi), 'dMdt' is accretion rate [Msol/yr]
and 'Mz' is the halo mass at 'z' for a halo that had mass 'Mi'
at starting redshift 'zi'
If mah = False and com = True then columns are
('zi',float),('Mi',float),('z',float),('c',float),('sig',float),('nu',float),('zf',float)
where 'zi' is the starting redshift, 'Mi' is halo mass at zi
'z' is output redshift (NB z>zi), 'c' is NFW concentration of halo
at the redshift 'z', 'sig' is the mass variance 'sigma',
'nu' is the dimensionless fluctuation for halo mass 'Mi' at 'zi',
'zf' is the formation redshift for a halo of mass 'Mi' at redshift 'zi'
If mah = True and com = True then columns are:
('zi',float),('Mi',float),('z',float),('dMdt',float),('Mz',float),
('c',float),('sig',float),('nu',float),('zf',float)
file : structured dataset with name 'filename' if passed
Raises
------
Output -1
If com = False and mah = False, as the user has to select something.
Output -1
If 'zi' and 'Mi' are arrays of unequal size. Impossible to match
corresponding masses and redshifts of output.
Examples
--------
Examples should be written in doctest format, and should illustrate how
to use the function.
>>> import examples
>>> examples.runcommands() # A series of ways to query structured dataset
>>> examples.plotcommands() # Examples to plot data | [
"Run",
"commah",
"code",
"on",
"halo",
"of",
"mass",
"Mi",
"at",
"redshift",
"zi",
"with",
"accretion",
"and",
"profile",
"history",
"at",
"higher",
"redshifts",
"z",
"This",
"is",
"based",
"on",
"Correa",
"et",
"al",
".",
"(",
"2015a",
"b",
"c",
")"
] | train | https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/commah/commah.py#L555-L799 |
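A minimal usage sketch for the `run` function documented above (the import name `commah` and the 'WMAP7' cosmology string follow the docstring; exact output shapes depend on the inputs):

    import commah

    # Solve a 1e12 Msol halo defined at zi=0, with outputs at three redshifts
    output = commah.run('WMAP7', zi=0.0, Mi=1e12, z=[0.0, 1.0, 2.0])
    print(output['dMdt'].flatten())  # accretion rates [Msol/yr]
    print(output['c'].flatten())     # NFW concentrations at each output redshift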
MatterMiners/cobald | cobald/daemon/core/config.py | load | def load(config_path: str):
"""
Load a configuration and keep it alive for the given context
:param config_path: path to a configuration file
"""
# we bind the config to _ to keep it alive
if os.path.splitext(config_path)[1] in ('.yaml', '.yml'):
_ = load_yaml_configuration(config_path, translator=PipelineTranslator())
elif os.path.splitext(config_path)[1] == '.py':
_ = load_python_configuration(config_path)
else:
raise ValueError('Unknown configuration extension: %r' % os.path.splitext(config_path)[1])
yield | python | def load(config_path: str):
"""
Load a configuration and keep it alive for the given context
:param config_path: path to a configuration file
"""
# we bind the config to _ to keep it alive
if os.path.splitext(config_path)[1] in ('.yaml', '.yml'):
_ = load_yaml_configuration(config_path, translator=PipelineTranslator())
elif os.path.splitext(config_path)[1] == '.py':
_ = load_python_configuration(config_path)
else:
raise ValueError('Unknown configuration extension: %r' % os.path.splitext(config_path)[1])
yield | [
"def",
"load",
"(",
"config_path",
":",
"str",
")",
":",
"# we bind the config to _ to keep it alive",
"if",
"os",
".",
"path",
".",
"splitext",
"(",
"config_path",
")",
"[",
"1",
"]",
"in",
"(",
"'.yaml'",
",",
"'.yml'",
")",
":",
"_",
"=",
"load_yaml_configuration",
"(",
"config_path",
",",
"translator",
"=",
"PipelineTranslator",
"(",
")",
")",
"elif",
"os",
".",
"path",
".",
"splitext",
"(",
"config_path",
")",
"[",
"1",
"]",
"==",
"'.py'",
":",
"_",
"=",
"load_python_configuration",
"(",
"config_path",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Unknown configuration extension: %r'",
"%",
"os",
".",
"path",
".",
"splitext",
"(",
"config_path",
")",
"[",
"1",
"]",
")",
"yield"
] | Load a configuration and keep it alive for the given context
:param config_path: path to a configuration file | [
"Load",
"a",
"configuration",
"and",
"keep",
"it",
"alive",
"for",
"the",
"given",
"context"
] | train | https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/core/config.py#L10-L23 |
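A usage sketch for `load` (the function yields once, so it is presumably wrapped as a context manager elsewhere in cobald; the explicit wrapping shown here is an assumption for illustration):

    from contextlib import contextmanager
    from cobald.daemon.core.config import load

    with contextmanager(load)('/etc/cobald/config.yaml'):
        ...  # the loaded configuration object stays referenced for this block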
aroberge/experimental | experimental/transformers/repeat_keyword.py | transform_source | def transform_source(text):
'''Replaces instances of
repeat n:
by
for __VAR_i in range(n):
where __VAR_i is a string that does not appear elsewhere
in the code sample.
'''
loop_keyword = 'repeat'
nb = text.count(loop_keyword)
if nb == 0:
return text
var_names = get_unique_variable_names(text, nb)
toks = tokenize.generate_tokens(StringIO(text).readline)
result = []
replacing_keyword = False
for toktype, tokvalue, _, _, _ in toks:
if toktype == tokenize.NAME and tokvalue == loop_keyword:
result.extend([
(tokenize.NAME, 'for'),
(tokenize.NAME, var_names.pop()),
(tokenize.NAME, 'in'),
(tokenize.NAME, 'range'),
(tokenize.OP, '(')
])
replacing_keyword = True
elif replacing_keyword and tokvalue == ':':
result.extend([
(tokenize.OP, ')'),
(tokenize.OP, ':')
])
replacing_keyword = False
else:
result.append((toktype, tokvalue))
return tokenize.untokenize(result) | python | def transform_source(text):
'''Replaces instances of
repeat n:
by
for __VAR_i in range(n):
where __VAR_i is a string that does not appear elsewhere
in the code sample.
'''
loop_keyword = 'repeat'
nb = text.count(loop_keyword)
if nb == 0:
return text
var_names = get_unique_variable_names(text, nb)
toks = tokenize.generate_tokens(StringIO(text).readline)
result = []
replacing_keyword = False
for toktype, tokvalue, _, _, _ in toks:
if toktype == tokenize.NAME and tokvalue == loop_keyword:
result.extend([
(tokenize.NAME, 'for'),
(tokenize.NAME, var_names.pop()),
(tokenize.NAME, 'in'),
(tokenize.NAME, 'range'),
(tokenize.OP, '(')
])
replacing_keyword = True
elif replacing_keyword and tokvalue == ':':
result.extend([
(tokenize.OP, ')'),
(tokenize.OP, ':')
])
replacing_keyword = False
else:
result.append((toktype, tokvalue))
return tokenize.untokenize(result) | [
"def",
"transform_source",
"(",
"text",
")",
":",
"loop_keyword",
"=",
"'repeat'",
"nb",
"=",
"text",
".",
"count",
"(",
"loop_keyword",
")",
"if",
"nb",
"==",
"0",
":",
"return",
"text",
"var_names",
"=",
"get_unique_variable_names",
"(",
"text",
",",
"nb",
")",
"toks",
"=",
"tokenize",
".",
"generate_tokens",
"(",
"StringIO",
"(",
"text",
")",
".",
"readline",
")",
"result",
"=",
"[",
"]",
"replacing_keyword",
"=",
"False",
"for",
"toktype",
",",
"tokvalue",
",",
"_",
",",
"_",
",",
"_",
"in",
"toks",
":",
"if",
"toktype",
"==",
"tokenize",
".",
"NAME",
"and",
"tokvalue",
"==",
"loop_keyword",
":",
"result",
".",
"extend",
"(",
"[",
"(",
"tokenize",
".",
"NAME",
",",
"'for'",
")",
",",
"(",
"tokenize",
".",
"NAME",
",",
"var_names",
".",
"pop",
"(",
")",
")",
",",
"(",
"tokenize",
".",
"NAME",
",",
"'in'",
")",
",",
"(",
"tokenize",
".",
"NAME",
",",
"'range'",
")",
",",
"(",
"tokenize",
".",
"OP",
",",
"'('",
")",
"]",
")",
"replacing_keyword",
"=",
"True",
"elif",
"replacing_keyword",
"and",
"tokvalue",
"==",
"':'",
":",
"result",
".",
"extend",
"(",
"[",
"(",
"tokenize",
".",
"OP",
",",
"')'",
")",
",",
"(",
"tokenize",
".",
"OP",
",",
"':'",
")",
"]",
")",
"replacing_keyword",
"=",
"False",
"else",
":",
"result",
".",
"append",
"(",
"(",
"toktype",
",",
"tokvalue",
")",
")",
"return",
"tokenize",
".",
"untokenize",
"(",
"result",
")"
] | Replaces instances of
repeat n:
by
for __VAR_i in range(n):
where __VAR_i is a string that does not appear elsewhere
in the code sample. | [
"Replaces",
"instances",
"of"
] | train | https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/transformers/repeat_keyword.py#L29-L70 |
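An illustration of the transformation (the exact variable suffix depends on the module-level ALL_NAMES state, and untokenize may alter whitespace slightly):

    source = "repeat 3:\n    print('hi')\n"
    print(transform_source(source))
    # Expected result, up to spacing:
    # for __VAR_0 in range(3):
    #     print('hi')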
aroberge/experimental | experimental/transformers/repeat_keyword.py | get_unique_variable_names | def get_unique_variable_names(text, nb):
'''returns a list of possible variable names that
are not found in the original text.'''
base_name = '__VAR_'
var_names = []
i = 0
j = 0
while j < nb:
tentative_name = base_name + str(i)
if text.count(tentative_name) == 0 and tentative_name not in ALL_NAMES:
var_names.append(tentative_name)
ALL_NAMES.append(tentative_name)
j += 1
i += 1
return var_names | python | def get_unique_variable_names(text, nb):
'''returns a list of possible variable names that
are not found in the original text.'''
base_name = '__VAR_'
var_names = []
i = 0
j = 0
while j < nb:
tentative_name = base_name + str(i)
if text.count(tentative_name) == 0 and tentative_name not in ALL_NAMES:
var_names.append(tentative_name)
ALL_NAMES.append(tentative_name)
j += 1
i += 1
return var_names | [
"def",
"get_unique_variable_names",
"(",
"text",
",",
"nb",
")",
":",
"base_name",
"=",
"'__VAR_'",
"var_names",
"=",
"[",
"]",
"i",
"=",
"0",
"j",
"=",
"0",
"while",
"j",
"<",
"nb",
":",
"tentative_name",
"=",
"base_name",
"+",
"str",
"(",
"i",
")",
"if",
"text",
".",
"count",
"(",
"tentative_name",
")",
"==",
"0",
"and",
"tentative_name",
"not",
"in",
"ALL_NAMES",
":",
"var_names",
".",
"append",
"(",
"tentative_name",
")",
"ALL_NAMES",
".",
"append",
"(",
"tentative_name",
")",
"j",
"+=",
"1",
"i",
"+=",
"1",
"return",
"var_names"
] | returns a list of possible variable names that
are not found in the original text. | [
"returns",
"a",
"list",
"of",
"possible",
"variables",
"names",
"that",
"are",
"not",
"found",
"in",
"the",
"original",
"text",
"."
] | train | https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/transformers/repeat_keyword.py#L75-L89 |
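A small sketch of the name generation (results depend on names already recorded in ALL_NAMES):

    names = get_unique_variable_names("x = __VAR_0", 2)
    # '__VAR_0' occurs in the text, so it is skipped;
    # on a fresh ALL_NAMES this yields ['__VAR_1', '__VAR_2']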
quantmind/agile-toolkit | agiletoolkit/api/releases.py | Releases.tag | def tag(self, tag):
"""Get a release by tag
"""
url = '%s/tags/%s' % (self, tag)
response = self.http.get(url, auth=self.auth)
response.raise_for_status()
return response.json() | python | def tag(self, tag):
"""Get a release by tag
"""
url = '%s/tags/%s' % (self, tag)
response = self.http.get(url, auth=self.auth)
response.raise_for_status()
return response.json() | [
"def",
"tag",
"(",
"self",
",",
"tag",
")",
":",
"url",
"=",
"'%s/tags/%s'",
"%",
"(",
"self",
",",
"tag",
")",
"response",
"=",
"self",
".",
"http",
".",
"get",
"(",
"url",
",",
"auth",
"=",
"self",
".",
"auth",
")",
"response",
".",
"raise_for_status",
"(",
")",
"return",
"response",
".",
"json",
"(",
")"
] | Get a release by tag | [
"Get",
"a",
"release",
"by",
"tag"
] | train | https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/releases.py#L37-L43 |
quantmind/agile-toolkit | agiletoolkit/api/releases.py | Releases.release_assets | def release_assets(self, release):
"""Assets for a given release
"""
release = self.as_id(release)
return self.get_list(url='%s/%s/assets' % (self, release)) | python | def release_assets(self, release):
"""Assets for a given release
"""
release = self.as_id(release)
return self.get_list(url='%s/%s/assets' % (self, release)) | [
"def",
"release_assets",
"(",
"self",
",",
"release",
")",
":",
"release",
"=",
"self",
".",
"as_id",
"(",
"release",
")",
"return",
"self",
".",
"get_list",
"(",
"url",
"=",
"'%s/%s/assets'",
"%",
"(",
"self",
",",
"release",
")",
")"
] | Assets for a given release | [
"Assets",
"for",
"a",
"given",
"release"
] | train | https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/releases.py#L53-L57 |
quantmind/agile-toolkit | agiletoolkit/api/releases.py | Releases.upload | def upload(self, release, filename, content_type=None):
"""Upload a file to a release
:param filename: filename to upload
:param content_type: optional content type
:return: json object from github
"""
release = self.as_id(release)
name = os.path.basename(filename)
if not content_type:
content_type, _ = mimetypes.guess_type(name)
if not content_type:
raise ValueError('content_type not known')
inputs = {'name': name}
url = '%s%s/%s/assets' % (self.uploads_url,
urlsplit(self.api_url).path,
release)
info = os.stat(filename)
size = info[stat.ST_SIZE]
response = self.http.post(
url, data=stream_upload(filename), auth=self.auth,
params=inputs,
headers={'content-type': content_type,
'content-length': str(size)})
response.raise_for_status()
return response.json() | python | def upload(self, release, filename, content_type=None):
"""Upload a file to a release
:param filename: filename to upload
:param content_type: optional content type
:return: json object from github
"""
release = self.as_id(release)
name = os.path.basename(filename)
if not content_type:
content_type, _ = mimetypes.guess_type(name)
if not content_type:
raise ValueError('content_type not known')
inputs = {'name': name}
url = '%s%s/%s/assets' % (self.uploads_url,
urlsplit(self.api_url).path,
release)
info = os.stat(filename)
size = info[stat.ST_SIZE]
response = self.http.post(
url, data=stream_upload(filename), auth=self.auth,
params=inputs,
headers={'content-type': content_type,
'content-length': str(size)})
response.raise_for_status()
return response.json() | [
"def",
"upload",
"(",
"self",
",",
"release",
",",
"filename",
",",
"content_type",
"=",
"None",
")",
":",
"release",
"=",
"self",
".",
"as_id",
"(",
"release",
")",
"name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
"if",
"not",
"content_type",
":",
"content_type",
",",
"_",
"=",
"mimetypes",
".",
"guess_type",
"(",
"name",
")",
"if",
"not",
"content_type",
":",
"raise",
"ValueError",
"(",
"'content_type not known'",
")",
"inputs",
"=",
"{",
"'name'",
":",
"name",
"}",
"url",
"=",
"'%s%s/%s/assets'",
"%",
"(",
"self",
".",
"uploads_url",
",",
"urlsplit",
"(",
"self",
".",
"api_url",
")",
".",
"path",
",",
"release",
")",
"info",
"=",
"os",
".",
"stat",
"(",
"filename",
")",
"size",
"=",
"info",
"[",
"stat",
".",
"ST_SIZE",
"]",
"response",
"=",
"self",
".",
"http",
".",
"post",
"(",
"url",
",",
"data",
"=",
"stream_upload",
"(",
"filename",
")",
",",
"auth",
"=",
"self",
".",
"auth",
",",
"params",
"=",
"inputs",
",",
"headers",
"=",
"{",
"'content-type'",
":",
"content_type",
",",
"'content-length'",
":",
"str",
"(",
"size",
")",
"}",
")",
"response",
".",
"raise_for_status",
"(",
")",
"return",
"response",
".",
"json",
"(",
")"
] | Upload a file to a release
:param filename: filename to upload
:param content_type: optional content type
:return: json object from github | [
"Upload",
"a",
"file",
"to",
"a",
"release"
] | train | https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/releases.py#L59-L84 |
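A hypothetical usage sketch combining `tag` and `upload` (the Releases constructor and the fields of the returned JSON are assumptions based on the GitHub releases API, not shown in this entry):

    releases = Releases(...)                  # construction details not shown here
    release = releases.tag('v1.2.3')          # look the release up by tag
    asset = releases.upload(release, 'dist/pkg-1.2.3.tar.gz')
    print(asset.get('browser_download_url'))  # standard GitHub asset field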
quantmind/agile-toolkit | agiletoolkit/api/releases.py | Releases.validate_tag | def validate_tag(self, tag_name, prefix=None):
"""Validate ``tag_name`` with the latest tag from github
If ``tag_name`` is a valid candidate, return the latest tag from github
"""
new_version = semantic_version(tag_name)
current = self.latest()
if current:
tag_name = current['tag_name']
if prefix:
tag_name = tag_name[len(prefix):]
tag_name = semantic_version(tag_name)
if tag_name >= new_version:
what = 'equal to' if tag_name == new_version else 'older than'
raise GithubException(
'Your local version "%s" is %s '
'the current github version "%s".\n'
'Bump the local version to '
'continue.' %
(
str(new_version),
what,
str(tag_name)
)
)
return current | python | def validate_tag(self, tag_name, prefix=None):
"""Validate ``tag_name`` with the latest tag from github
If ``tag_name`` is a valid candidate, return the latest tag from github
"""
new_version = semantic_version(tag_name)
current = self.latest()
if current:
tag_name = current['tag_name']
if prefix:
tag_name = tag_name[len(prefix):]
tag_name = semantic_version(tag_name)
if tag_name >= new_version:
what = 'equal to' if tag_name == new_version else 'older than'
raise GithubException(
'Your local version "%s" is %s '
'the current github version "%s".\n'
'Bump the local version to '
'continue.' %
(
str(new_version),
what,
str(tag_name)
)
)
return current | [
"def",
"validate_tag",
"(",
"self",
",",
"tag_name",
",",
"prefix",
"=",
"None",
")",
":",
"new_version",
"=",
"semantic_version",
"(",
"tag_name",
")",
"current",
"=",
"self",
".",
"latest",
"(",
")",
"if",
"current",
":",
"tag_name",
"=",
"current",
"[",
"'tag_name'",
"]",
"if",
"prefix",
":",
"tag_name",
"=",
"tag_name",
"[",
"len",
"(",
"prefix",
")",
":",
"]",
"tag_name",
"=",
"semantic_version",
"(",
"tag_name",
")",
"if",
"tag_name",
">=",
"new_version",
":",
"what",
"=",
"'equal to'",
"if",
"tag_name",
"==",
"new_version",
"else",
"'older than'",
"raise",
"GithubException",
"(",
"'Your local version \"%s\" is %s '",
"'the current github version \"%s\".\\n'",
"'Bump the local version to '",
"'continue.'",
"%",
"(",
"str",
"(",
"new_version",
")",
",",
"what",
",",
"str",
"(",
"tag_name",
")",
")",
")",
"return",
"current"
] | Validate ``tag_name`` with the latest tag from github
If ``tag_name`` is a valid candidate, return the latest tag from github | [
"Validate",
"tag_name",
"with",
"the",
"latest",
"tag",
"from",
"github"
] | train | https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/releases.py#L86-L111 |
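A sketch of tag validation (reusing the hypothetical `releases` client from above):

    # Raises GithubException unless '1.3.0' is strictly newer than the latest
    # GitHub release tag, compared after stripping the 'v' prefix:
    latest = releases.validate_tag('1.3.0', prefix='v')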
teaearlgraycold/puni | puni/base.py | Note.full_url | def full_url(self):
"""Return the full reddit URL associated with the usernote.
Builds the URL from the note's stored link and subreddit; no arguments are needed.
"""
if self.link == '':
return None
else:
return Note._expand_url(self.link, self.subreddit) | python | def full_url(self):
"""Return the full reddit URL associated with the usernote.
Builds the URL from the note's stored link and subreddit; no arguments are needed.
"""
if self.link == '':
return None
else:
return Note._expand_url(self.link, self.subreddit) | [
"def",
"full_url",
"(",
"self",
")",
":",
"if",
"self",
".",
"link",
"==",
"''",
":",
"return",
"None",
"else",
":",
"return",
"Note",
".",
"_expand_url",
"(",
"self",
".",
"link",
",",
"self",
".",
"subreddit",
")"
] | Return the full reddit URL associated with the usernote.
Builds the URL from the note's stored link and subreddit; no arguments are needed. | [
"Return",
"the",
"full",
"reddit",
"URL",
"associated",
"with",
"the",
"usernote",
"."
] | train | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L87-L96 |
teaearlgraycold/puni | puni/base.py | Note._compress_url | def _compress_url(link):
"""Convert a reddit URL into the short-hand used by usernotes.
Arguments:
link: a link to a comment, submission, or message (str)
Returns a String of the shorthand URL
"""
comment_re = re.compile(r'/comments/([A-Za-z\d]{2,})(?:/[^\s]+/([A-Za-z\d]+))?')
message_re = re.compile(r'/message/messages/([A-Za-z\d]+)')
matches = re.findall(comment_re, link)
if len(matches) == 0:
matches = re.findall(message_re, link)
if len(matches) == 0:
return None
else:
return 'm,' + matches[0]
else:
if matches[0][1] == '':
return 'l,' + matches[0][0]
else:
return 'l,' + matches[0][0] + ',' + matches[0][1] | python | def _compress_url(link):
"""Convert a reddit URL into the short-hand used by usernotes.
Arguments:
link: a link to a comment, submission, or message (str)
Returns a String of the shorthand URL
"""
comment_re = re.compile(r'/comments/([A-Za-z\d]{2,})(?:/[^\s]+/([A-Za-z\d]+))?')
message_re = re.compile(r'/message/messages/([A-Za-z\d]+)')
matches = re.findall(comment_re, link)
if len(matches) == 0:
matches = re.findall(message_re, link)
if len(matches) == 0:
return None
else:
return 'm,' + matches[0]
else:
if matches[0][1] == '':
return 'l,' + matches[0][0]
else:
return 'l,' + matches[0][0] + ',' + matches[0][1] | [
"def",
"_compress_url",
"(",
"link",
")",
":",
"comment_re",
"=",
"re",
".",
"compile",
"(",
"r'/comments/([A-Za-z\\d]{2,})(?:/[^\\s]+/([A-Za-z\\d]+))?'",
")",
"message_re",
"=",
"re",
".",
"compile",
"(",
"r'/message/messages/([A-Za-z\\d]+)'",
")",
"matches",
"=",
"re",
".",
"findall",
"(",
"comment_re",
",",
"link",
")",
"if",
"len",
"(",
"matches",
")",
"==",
"0",
":",
"matches",
"=",
"re",
".",
"findall",
"(",
"message_re",
",",
"link",
")",
"if",
"len",
"(",
"matches",
")",
"==",
"0",
":",
"return",
"None",
"else",
":",
"return",
"'m,'",
"+",
"matches",
"[",
"0",
"]",
"else",
":",
"if",
"matches",
"[",
"0",
"]",
"[",
"1",
"]",
"==",
"''",
":",
"return",
"'l,'",
"+",
"matches",
"[",
"0",
"]",
"[",
"0",
"]",
"else",
":",
"return",
"'l,'",
"+",
"matches",
"[",
"0",
"]",
"[",
"0",
"]",
"+",
"','",
"+",
"matches",
"[",
"0",
"]",
"[",
"1",
"]"
] | Convert a reddit URL into the short-hand used by usernotes.
Arguments:
link: a link to a comment, submission, or message (str)
Returns a String of the shorthand URL | [
"Convert",
"a",
"reddit",
"URL",
"into",
"the",
"short",
"-",
"hand",
"used",
"by",
"usernotes",
"."
] | train | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L99-L122 |
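The shorthand forms this produces, derived from the regular expressions above:

    Note._compress_url('https://reddit.com/r/sub/comments/abc123/title/def456')
    # -> 'l,abc123,def456'   (submission id, comment id)
    Note._compress_url('https://reddit.com/r/sub/comments/abc123/title/')
    # -> 'l,abc123'          (submission only)
    Note._compress_url('https://reddit.com/message/messages/xyz789')
    # -> 'm,xyz789'          (private message)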
teaearlgraycold/puni | puni/base.py | Note._expand_url | def _expand_url(short_link, subreddit=None):
"""Convert a usernote's URL short-hand into a full reddit URL.
Arguments:
short_link: the compressed link from a usernote (str)
subreddit: the subreddit the URL is for (PRAW Subreddit object or str)
Returns a String of the full URL.
"""
# Some URL structures for notes
message_scheme = 'https://reddit.com/message/messages/{}'
comment_scheme = 'https://reddit.com/r/{}/comments/{}/-/{}'
post_scheme = 'https://reddit.com/r/{}/comments/{}/'
if short_link == '':
return None
else:
parts = short_link.split(',')
if parts[0] == 'm':
return message_scheme.format(parts[1])
if parts[0] == 'l' and subreddit:
if len(parts) > 2:
return comment_scheme.format(subreddit, parts[1], parts[2])
else:
return post_scheme.format(subreddit, parts[1])
elif not subreddit:
raise ValueError('Subreddit name must be provided')
else:
return None | python | def _expand_url(short_link, subreddit=None):
"""Convert a usernote's URL short-hand into a full reddit URL.
Arguments:
short_link: the compressed link from a usernote (str)
subreddit: the subreddit the URL is for (PRAW Subreddit object or str)
Returns a String of the full URL.
"""
# Some URL structures for notes
message_scheme = 'https://reddit.com/message/messages/{}'
comment_scheme = 'https://reddit.com/r/{}/comments/{}/-/{}'
post_scheme = 'https://reddit.com/r/{}/comments/{}/'
if short_link == '':
return None
else:
parts = short_link.split(',')
if parts[0] == 'm':
return message_scheme.format(parts[1])
if parts[0] == 'l' and subreddit:
if len(parts) > 2:
return comment_scheme.format(subreddit, parts[1], parts[2])
else:
return post_scheme.format(subreddit, parts[1])
elif not subreddit:
raise ValueError('Subreddit name must be provided')
else:
return None | [
"def",
"_expand_url",
"(",
"short_link",
",",
"subreddit",
"=",
"None",
")",
":",
"# Some URL structures for notes",
"message_scheme",
"=",
"'https://reddit.com/message/messages/{}'",
"comment_scheme",
"=",
"'https://reddit.com/r/{}/comments/{}/-/{}'",
"post_scheme",
"=",
"'https://reddit.com/r/{}/comments/{}/'",
"if",
"short_link",
"==",
"''",
":",
"return",
"None",
"else",
":",
"parts",
"=",
"short_link",
".",
"split",
"(",
"','",
")",
"if",
"parts",
"[",
"0",
"]",
"==",
"'m'",
":",
"return",
"message_scheme",
".",
"format",
"(",
"parts",
"[",
"1",
"]",
")",
"if",
"parts",
"[",
"0",
"]",
"==",
"'l'",
"and",
"subreddit",
":",
"if",
"len",
"(",
"parts",
")",
">",
"2",
":",
"return",
"comment_scheme",
".",
"format",
"(",
"subreddit",
",",
"parts",
"[",
"1",
"]",
",",
"parts",
"[",
"2",
"]",
")",
"else",
":",
"return",
"post_scheme",
".",
"format",
"(",
"subreddit",
",",
"parts",
"[",
"1",
"]",
")",
"elif",
"not",
"subreddit",
":",
"raise",
"ValueError",
"(",
"'Subreddit name must be provided'",
")",
"else",
":",
"return",
"None"
] | Convert a usernote's URL short-hand into a full reddit URL.
Arguments:
subreddit: the subreddit the URL is for (PRAW Subreddit object or str)
short_link: the compressed link from a usernote (str)
Returns a String of the full URL. | [
"Convert",
"a",
"usernote",
"s",
"URL",
"short",
"-",
"hand",
"into",
"a",
"full",
"reddit",
"URL",
"."
] | train | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L125-L154 |
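The inverse expansion, matching the URL templates above:

    Note._expand_url('l,abc123,def456', 'mysub')
    # -> 'https://reddit.com/r/mysub/comments/abc123/-/def456'
    Note._expand_url('m,xyz789')
    # -> 'https://reddit.com/message/messages/xyz789'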
teaearlgraycold/puni | puni/base.py | UserNotes.get_json | def get_json(self):
"""Get the JSON stored on the usernotes wiki page.
Returns a dict representation of the usernotes (with the notes BLOB
decoded).
Raises:
RuntimeError if the usernotes version is incompatible with this
version of puni.
"""
try:
usernotes = self.subreddit.wiki[self.page_name].content_md
notes = json.loads(usernotes)
except NotFound:
self._init_notes()
else:
if notes['ver'] != self.schema:
raise RuntimeError(
'Usernotes schema is v{0}, puni requires v{1}'.
format(notes['ver'], self.schema)
)
self.cached_json = self._expand_json(notes)
return self.cached_json | python | def get_json(self):
"""Get the JSON stored on the usernotes wiki page.
Returns a dict representation of the usernotes (with the notes BLOB
decoded).
Raises:
RuntimeError if the usernotes version is incompatible with this
version of puni.
"""
try:
usernotes = self.subreddit.wiki[self.page_name].content_md
notes = json.loads(usernotes)
except NotFound:
self._init_notes()
else:
if notes['ver'] != self.schema:
raise RuntimeError(
'Usernotes schema is v{0}, puni requires v{1}'.
format(notes['ver'], self.schema)
)
self.cached_json = self._expand_json(notes)
return self.cached_json | [
"def",
"get_json",
"(",
"self",
")",
":",
"try",
":",
"usernotes",
"=",
"self",
".",
"subreddit",
".",
"wiki",
"[",
"self",
".",
"page_name",
"]",
".",
"content_md",
"notes",
"=",
"json",
".",
"loads",
"(",
"usernotes",
")",
"except",
"NotFound",
":",
"self",
".",
"_init_notes",
"(",
")",
"else",
":",
"if",
"notes",
"[",
"'ver'",
"]",
"!=",
"self",
".",
"schema",
":",
"raise",
"RuntimeError",
"(",
"'Usernotes schema is v{0}, puni requires v{1}'",
".",
"format",
"(",
"notes",
"[",
"'ver'",
"]",
",",
"self",
".",
"schema",
")",
")",
"self",
".",
"cached_json",
"=",
"self",
".",
"_expand_json",
"(",
"notes",
")",
"return",
"self",
".",
"cached_json"
] | Get the JSON stored on the usernotes wiki page.
Returns a dict representation of the usernotes (with the notes BLOB
decoded).
Raises:
RuntimeError if the usernotes version is incompatible with this
version of puni. | [
"Get",
"the",
"JSON",
"stored",
"on",
"the",
"usernotes",
"wiki",
"page",
"."
] | train | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L186-L210 |
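A usage sketch (the UserNotes constructor arguments are assumptions; they are not shown in this entry):

    un = UserNotes(reddit, subreddit)   # hypothetical construction
    notes = un.get_json()
    print(notes['ver'], len(notes['users']))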
teaearlgraycold/puni | puni/base.py | UserNotes._init_notes | def _init_notes(self):
"""Set up the UserNotes page with the initial JSON schema."""
self.cached_json = {
'ver': self.schema,
'users': {},
'constants': {
'users': [x.name for x in self.subreddit.moderator()],
'warnings': Note.warnings
}
}
self.set_json('Initializing JSON via puni', True) | python | def _init_notes(self):
"""Set up the UserNotes page with the initial JSON schema."""
self.cached_json = {
'ver': self.schema,
'users': {},
'constants': {
'users': [x.name for x in self.subreddit.moderator()],
'warnings': Note.warnings
}
}
self.set_json('Initializing JSON via puni', True) | [
"def",
"_init_notes",
"(",
"self",
")",
":",
"self",
".",
"cached_json",
"=",
"{",
"'ver'",
":",
"self",
".",
"schema",
",",
"'users'",
":",
"{",
"}",
",",
"'constants'",
":",
"{",
"'users'",
":",
"[",
"x",
".",
"name",
"for",
"x",
"in",
"self",
".",
"subreddit",
".",
"moderator",
"(",
")",
"]",
",",
"'warnings'",
":",
"Note",
".",
"warnings",
"}",
"}",
"self",
".",
"set_json",
"(",
"'Initializing JSON via puni'",
",",
"True",
")"
] | Set up the UserNotes page with the initial JSON schema. | [
"Set",
"up",
"the",
"UserNotes",
"page",
"with",
"the",
"initial",
"JSON",
"schema",
"."
] | train | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L212-L223 |
teaearlgraycold/puni | puni/base.py | UserNotes.set_json | def set_json(self, reason='', new_page=False):
"""Send the JSON from the cache to the usernotes wiki page.
Arguments:
reason: the change reason that will be posted to the wiki changelog
(str)
Raises:
OverflowError if the new JSON data is greater than max_page_size
"""
compressed_json = json.dumps(self._compress_json(self.cached_json))
if len(compressed_json) > self.max_page_size:
raise OverflowError(
'Usernotes page is too large (>{0} characters)'.
format(self.max_page_size)
)
if new_page:
self.subreddit.wiki.create(
self.page_name,
compressed_json,
reason
)
# Set the page as hidden and available to moderators only
self.subreddit.wiki[self.page_name].mod.update(False, permlevel=2)
else:
self.subreddit.wiki[self.page_name].edit(
compressed_json,
reason
) | python | def set_json(self, reason='', new_page=False):
"""Send the JSON from the cache to the usernotes wiki page.
Arguments:
reason: the change reason that will be posted to the wiki changelog
(str)
Raises:
OverflowError if the new JSON data is greater than max_page_size
"""
compressed_json = json.dumps(self._compress_json(self.cached_json))
if len(compressed_json) > self.max_page_size:
raise OverflowError(
'Usernotes page is too large (>{0} characters)'.
format(self.max_page_size)
)
if new_page:
self.subreddit.wiki.create(
self.page_name,
compressed_json,
reason
)
# Set the page as hidden and available to moderators only
self.subreddit.wiki[self.page_name].mod.update(False, permlevel=2)
else:
self.subreddit.wiki[self.page_name].edit(
compressed_json,
reason
) | [
"def",
"set_json",
"(",
"self",
",",
"reason",
"=",
"''",
",",
"new_page",
"=",
"False",
")",
":",
"compressed_json",
"=",
"json",
".",
"dumps",
"(",
"self",
".",
"_compress_json",
"(",
"self",
".",
"cached_json",
")",
")",
"if",
"len",
"(",
"compressed_json",
")",
">",
"self",
".",
"max_page_size",
":",
"raise",
"OverflowError",
"(",
"'Usernotes page is too large (>{0} characters)'",
".",
"format",
"(",
"self",
".",
"max_page_size",
")",
")",
"if",
"new_page",
":",
"self",
".",
"subreddit",
".",
"wiki",
".",
"create",
"(",
"self",
".",
"page_name",
",",
"compressed_json",
",",
"reason",
")",
"# Set the page as hidden and available to moderators only",
"self",
".",
"subreddit",
".",
"wiki",
"[",
"self",
".",
"page_name",
"]",
".",
"mod",
".",
"update",
"(",
"False",
",",
"permlevel",
"=",
"2",
")",
"else",
":",
"self",
".",
"subreddit",
".",
"wiki",
"[",
"self",
".",
"page_name",
"]",
".",
"edit",
"(",
"compressed_json",
",",
"reason",
")"
] | Send the JSON from the cache to the usernotes wiki page.
Arguments:
reason: the change reason that will be posted to the wiki changelog
(str)
Raises:
OverflowError if the new JSON data is greater than max_page_size | [
"Send",
"the",
"JSON",
"from",
"the",
"cache",
"to",
"the",
"usernotes",
"wiki",
"page",
"."
] | train | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L225-L254 |
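A sketch of mutating the cached JSON and persisting it (the user name is illustrative; `un` is the hypothetical UserNotes instance from above):

    un.cached_json['users']['example_user'] = {'ns': []}
    un.set_json('Add empty record via puni')   # may raise OverflowError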
teaearlgraycold/puni | puni/base.py | UserNotes.get_notes | def get_notes(self, user):
"""Return a list of Note objects for the given user.
Return an empty list if no notes are found.
Arguments:
user: the user to search for in the usernotes (str)
"""
# Try to search for all notes on a user, return an empty list if none
# are found.
try:
users_notes = []
for note in self.cached_json['users'][user]['ns']:
users_notes.append(Note(
user=user,
note=note['n'],
subreddit=self.subreddit,
mod=self._mod_from_index(note['m']),
link=note['l'],
warning=self._warning_from_index(note['w']),
note_time=note['t']
))
return users_notes
except KeyError:
# User not found
return [] | python | def get_notes(self, user):
"""Return a list of Note objects for the given user.
Return an empty list if no notes are found.
Arguments:
user: the user to search for in the usernotes (str)
"""
# Try to search for all notes on a user, return an empty list if none
# are found.
try:
users_notes = []
for note in self.cached_json['users'][user]['ns']:
users_notes.append(Note(
user=user,
note=note['n'],
subreddit=self.subreddit,
mod=self._mod_from_index(note['m']),
link=note['l'],
warning=self._warning_from_index(note['w']),
note_time=note['t']
))
return users_notes
except KeyError:
# User not found
return [] | [
"def",
"get_notes",
"(",
"self",
",",
"user",
")",
":",
"# Try to search for all notes on a user, return an empty list if none",
"# are found.",
"try",
":",
"users_notes",
"=",
"[",
"]",
"for",
"note",
"in",
"self",
".",
"cached_json",
"[",
"'users'",
"]",
"[",
"user",
"]",
"[",
"'ns'",
"]",
":",
"users_notes",
".",
"append",
"(",
"Note",
"(",
"user",
"=",
"user",
",",
"note",
"=",
"note",
"[",
"'n'",
"]",
",",
"subreddit",
"=",
"self",
".",
"subreddit",
",",
"mod",
"=",
"self",
".",
"_mod_from_index",
"(",
"note",
"[",
"'m'",
"]",
")",
",",
"link",
"=",
"note",
"[",
"'l'",
"]",
",",
"warning",
"=",
"self",
".",
"_warning_from_index",
"(",
"note",
"[",
"'w'",
"]",
")",
",",
"note_time",
"=",
"note",
"[",
"'t'",
"]",
")",
")",
"return",
"users_notes",
"except",
"KeyError",
":",
"# User not found",
"return",
"[",
"]"
] | Return a list of Note objects for the given user.
Return an empty list if no notes are found.
Arguments:
user: the user to search for in the usernotes (str) | [
"Return",
"a",
"list",
"of",
"Note",
"objects",
"for",
"the",
"given",
"user",
"."
] | train | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L257-L284 |
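A sketch of reading notes back (an empty list is returned for unknown users):

    for note in un.get_notes('example_user'):
        print(note.warning, note.note)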
teaearlgraycold/puni | puni/base.py | UserNotes._expand_json | def _expand_json(self, j):
"""Decompress the BLOB portion of the usernotes.
Arguments:
j: the JSON returned from the wiki page (dict)
Returns a Dict with the 'blob' key removed and a 'users' key added
"""
decompressed_json = copy.copy(j)
decompressed_json.pop('blob', None) # Remove BLOB portion of JSON
# Decode and decompress JSON
compressed_data = base64.b64decode(j['blob'])
original_json = zlib.decompress(compressed_data).decode('utf-8')
decompressed_json['users'] = json.loads(original_json) # Insert users
return decompressed_json | python | def _expand_json(self, j):
"""Decompress the BLOB portion of the usernotes.
Arguments:
j: the JSON returned from the wiki page (dict)
Returns a Dict with the 'blob' key removed and a 'users' key added
"""
decompressed_json = copy.copy(j)
decompressed_json.pop('blob', None) # Remove BLOB portion of JSON
# Decode and decompress JSON
compressed_data = base64.b64decode(j['blob'])
original_json = zlib.decompress(compressed_data).decode('utf-8')
decompressed_json['users'] = json.loads(original_json) # Insert users
return decompressed_json | [
"def",
"_expand_json",
"(",
"self",
",",
"j",
")",
":",
"decompressed_json",
"=",
"copy",
".",
"copy",
"(",
"j",
")",
"decompressed_json",
".",
"pop",
"(",
"'blob'",
",",
"None",
")",
"# Remove BLOB portion of JSON",
"# Decode and decompress JSON",
"compressed_data",
"=",
"base64",
".",
"b64decode",
"(",
"j",
"[",
"'blob'",
"]",
")",
"original_json",
"=",
"zlib",
".",
"decompress",
"(",
"compressed_data",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"decompressed_json",
"[",
"'users'",
"]",
"=",
"json",
".",
"loads",
"(",
"original_json",
")",
"# Insert users",
"return",
"decompressed_json"
] | Decompress the BLOB portion of the usernotes.
Arguments:
j: the JSON returned from the wiki page (dict)
Returns a Dict with the 'blob' key removed and a 'users' key added | [
"Decompress",
"the",
"BLOB",
"portion",
"of",
"the",
"usernotes",
"."
] | train | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L307-L324 |
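The blob scheme that `_expand_json` reverses can be exercised with the standard library alone; this round trip uses no puni internals.

```python
import base64
import json
import zlib

# Build a wiki-page dict the way the usernotes format stores it:
# users as a zlib-compressed, base64-encoded JSON blob.
users = {'some_user': {'ns': []}}
blob = base64.b64encode(
    zlib.compress(json.dumps(users).encode('utf-8'))).decode('utf-8')
page = {'blob': blob}

# What _expand_json does: drop 'blob' and decode it back into 'users'.
expanded = {k: v for k, v in page.items() if k != 'blob'}
expanded['users'] = json.loads(
    zlib.decompress(base64.b64decode(page['blob'])).decode('utf-8'))
assert expanded['users'] == users
```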
teaearlgraycold/puni | puni/base.py | UserNotes._compress_json | def _compress_json(self, j):
"""Compress the BLOB data portion of the usernotes.
Arguments:
j: the JSON in Schema v5 format (dict)
Returns a dict with the 'users' key removed and 'blob' key added
"""
compressed_json = copy.copy(j)
compressed_json.pop('users', None)
compressed_data = zlib.compress(
json.dumps(j['users']).encode('utf-8'),
self.zlib_compression_strength
)
b64_data = base64.b64encode(compressed_data).decode('utf-8')
compressed_json['blob'] = b64_data
return compressed_json | python | def _compress_json(self, j):
"""Compress the BLOB data portion of the usernotes.
Arguments:
j: the JSON in Schema v5 format (dict)
Returns a dict with the 'users' key removed and 'blob' key added
"""
compressed_json = copy.copy(j)
compressed_json.pop('users', None)
compressed_data = zlib.compress(
json.dumps(j['users']).encode('utf-8'),
self.zlib_compression_strength
)
b64_data = base64.b64encode(compressed_data).decode('utf-8')
compressed_json['blob'] = b64_data
return compressed_json | [
"def",
"_compress_json",
"(",
"self",
",",
"j",
")",
":",
"compressed_json",
"=",
"copy",
".",
"copy",
"(",
"j",
")",
"compressed_json",
".",
"pop",
"(",
"'users'",
",",
"None",
")",
"compressed_data",
"=",
"zlib",
".",
"compress",
"(",
"json",
".",
"dumps",
"(",
"j",
"[",
"'users'",
"]",
")",
".",
"encode",
"(",
"'utf-8'",
")",
",",
"self",
".",
"zlib_compression_strength",
")",
"b64_data",
"=",
"base64",
".",
"b64encode",
"(",
"compressed_data",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"compressed_json",
"[",
"'blob'",
"]",
"=",
"b64_data",
"return",
"compressed_json"
] | Compress the BLOB data portion of the usernotes.
Arguments:
j: the JSON in Schema v5 format (dict)
Returns a dict with the 'users' key removed and 'blob' key added | [
"Compress",
"the",
"BLOB",
"data",
"portion",
"of",
"the",
"usernotes",
"."
] | train | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L326-L345 |
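The forward direction, again stdlib-only; the literal `9` stands in for `self.zlib_compression_strength`.

```python
import base64
import json
import zlib

page = {'users': {'some_user': {'ns': []}}}

# What _compress_json does: drop 'users' and store it as a blob.
compressed = {k: v for k, v in page.items() if k != 'users'}
compressed['blob'] = base64.b64encode(
    zlib.compress(json.dumps(page['users']).encode('utf-8'), 9)
).decode('utf-8')
print(sorted(compressed))  # ['blob']
```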
teaearlgraycold/puni | puni/base.py | UserNotes.add_note | def add_note(self, note):
"""Add a note to the usernotes wiki page.
Arguments:
note: the note to be added (Note)
Returns the update message for the usernotes wiki
Raises:
ValueError when the warning type of the note can not be found in the
stored list of warnings.
"""
notes = self.cached_json
if not note.moderator:
note.moderator = self.r.user.me().name
# Get index of moderator in mod list from usernotes
# Add moderator to list if not already there
try:
mod_index = notes['constants']['users'].index(note.moderator)
except ValueError:
notes['constants']['users'].append(note.moderator)
mod_index = notes['constants']['users'].index(note.moderator)
# Get index of warning type from warnings list
# Add warning type to list if not already there
try:
warn_index = notes['constants']['warnings'].index(note.warning)
except ValueError:
if note.warning in Note.warnings:
notes['constants']['warnings'].append(note.warning)
warn_index = notes['constants']['warnings'].index(note.warning)
else:
raise ValueError('Warning type not valid: ' + note.warning)
new_note = {
'n': note.note,
't': note.time,
'm': mod_index,
'l': note.link,
'w': warn_index
}
try:
notes['users'][note.username]['ns'].insert(0, new_note)
except KeyError:
notes['users'][note.username] = {'ns': [new_note]}
return '"create new note on user {}" via puni'.format(note.username) | python | def add_note(self, note):
"""Add a note to the usernotes wiki page.
Arguments:
note: the note to be added (Note)
Returns the update message for the usernotes wiki
Raises:
ValueError when the warning type of the note can not be found in the
stored list of warnings.
"""
notes = self.cached_json
if not note.moderator:
note.moderator = self.r.user.me().name
# Get index of moderator in mod list from usernotes
# Add moderator to list if not already there
try:
mod_index = notes['constants']['users'].index(note.moderator)
except ValueError:
notes['constants']['users'].append(note.moderator)
mod_index = notes['constants']['users'].index(note.moderator)
# Get index of warning type from warnings list
# Add warning type to list if not already there
try:
warn_index = notes['constants']['warnings'].index(note.warning)
except ValueError:
if note.warning in Note.warnings:
notes['constants']['warnings'].append(note.warning)
warn_index = notes['constants']['warnings'].index(note.warning)
else:
raise ValueError('Warning type not valid: ' + note.warning)
new_note = {
'n': note.note,
't': note.time,
'm': mod_index,
'l': note.link,
'w': warn_index
}
try:
notes['users'][note.username]['ns'].insert(0, new_note)
except KeyError:
notes['users'][note.username] = {'ns': [new_note]}
return '"create new note on user {}" via puni'.format(note.username) | [
"def",
"add_note",
"(",
"self",
",",
"note",
")",
":",
"notes",
"=",
"self",
".",
"cached_json",
"if",
"not",
"note",
".",
"moderator",
":",
"note",
".",
"moderator",
"=",
"self",
".",
"r",
".",
"user",
".",
"me",
"(",
")",
".",
"name",
"# Get index of moderator in mod list from usernotes",
"# Add moderator to list if not already there",
"try",
":",
"mod_index",
"=",
"notes",
"[",
"'constants'",
"]",
"[",
"'users'",
"]",
".",
"index",
"(",
"note",
".",
"moderator",
")",
"except",
"ValueError",
":",
"notes",
"[",
"'constants'",
"]",
"[",
"'users'",
"]",
".",
"append",
"(",
"note",
".",
"moderator",
")",
"mod_index",
"=",
"notes",
"[",
"'constants'",
"]",
"[",
"'users'",
"]",
".",
"index",
"(",
"note",
".",
"moderator",
")",
"# Get index of warning type from warnings list",
"# Add warning type to list if not already there",
"try",
":",
"warn_index",
"=",
"notes",
"[",
"'constants'",
"]",
"[",
"'warnings'",
"]",
".",
"index",
"(",
"note",
".",
"warning",
")",
"except",
"ValueError",
":",
"if",
"note",
".",
"warning",
"in",
"Note",
".",
"warnings",
":",
"notes",
"[",
"'constants'",
"]",
"[",
"'warnings'",
"]",
".",
"append",
"(",
"note",
".",
"warning",
")",
"warn_index",
"=",
"notes",
"[",
"'constants'",
"]",
"[",
"'warnings'",
"]",
".",
"index",
"(",
"note",
".",
"warning",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Warning type not valid: '",
"+",
"note",
".",
"warning",
")",
"new_note",
"=",
"{",
"'n'",
":",
"note",
".",
"note",
",",
"'t'",
":",
"note",
".",
"time",
",",
"'m'",
":",
"mod_index",
",",
"'l'",
":",
"note",
".",
"link",
",",
"'w'",
":",
"warn_index",
"}",
"try",
":",
"notes",
"[",
"'users'",
"]",
"[",
"note",
".",
"username",
"]",
"[",
"'ns'",
"]",
".",
"insert",
"(",
"0",
",",
"new_note",
")",
"except",
"KeyError",
":",
"notes",
"[",
"'users'",
"]",
"[",
"note",
".",
"username",
"]",
"=",
"{",
"'ns'",
":",
"[",
"new_note",
"]",
"}",
"return",
"'\"create new note on user {}\" via puni'",
".",
"format",
"(",
"note",
".",
"username",
")"
] | Add a note to the usernotes wiki page.
Arguments:
note: the note to be added (Note)
Returns the update message for the usernotes wiki
Raises:
ValueError when the warning type of the note can not be found in the
stored list of warnings. | [
"Add",
"a",
"note",
"to",
"the",
"usernotes",
"wiki",
"page",
"."
] | train | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L348-L397 |
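A hedged sketch of recording a note. `Note`'s keyword arguments are inferred from how `get_notes` above constructs them, and the link/time defaults are assumptions; `add_note` mutates the cached JSON and returns the wiki edit reason, while pushing the page back is a separate step.

```python
import puni

# `un` is a UserNotes instance as in the earlier sketch.
n = puni.Note(user='some_user', note='spamming links', warning='spamwarn')
reason = un.add_note(n)   # raises ValueError for an unknown warning type
print(reason)             # '"create new note on user some_user" via puni'
```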
teaearlgraycold/puni | puni/base.py | UserNotes.remove_note | def remove_note(self, username, index):
"""Remove a single usernote from the usernotes.
Arguments:
username: the user for whom you're removing a note (str)
index: the index of the note which is to be removed (int)
Returns the update message for the usernotes wiki
"""
self.cached_json['users'][username]['ns'].pop(index)
# Go ahead and remove the user's entry if they have no more notes left
if len(self.cached_json['users'][username]['ns']) == 0:
del self.cached_json['users'][username]
return '"delete note #{} on user {}" via puni'.format(index, username) | python | def remove_note(self, username, index):
"""Remove a single usernote from the usernotes.
Arguments:
username: the user for whom you're removing a note (str)
index: the index of the note which is to be removed (int)
Returns the update message for the usernotes wiki
"""
self.cached_json['users'][username]['ns'].pop(index)
# Go ahead and remove the user's entry if they have no more notes left
if len(self.cached_json['users'][username]['ns']) == 0:
del self.cached_json['users'][username]
return '"delete note #{} on user {}" via puni'.format(index, username) | [
"def",
"remove_note",
"(",
"self",
",",
"username",
",",
"index",
")",
":",
"self",
".",
"cached_json",
"[",
"'users'",
"]",
"[",
"username",
"]",
"[",
"'ns'",
"]",
".",
"pop",
"(",
"index",
")",
"# Go ahead and remove the user's entry if they have no more notes left",
"if",
"len",
"(",
"self",
".",
"cached_json",
"[",
"'users'",
"]",
"[",
"username",
"]",
"[",
"'ns'",
"]",
")",
"==",
"0",
":",
"del",
"self",
".",
"cached_json",
"[",
"'users'",
"]",
"[",
"username",
"]",
"return",
"'\"delete note #{} on user {}\" via puni'",
".",
"format",
"(",
"index",
",",
"username",
")"
] | Remove a single usernote from the usernotes.
Arguments:
username: the user for whom you're removing a note (str)
index: the index of the note which is to be removed (int)
Returns the update message for the usernotes wiki | [
"Remove",
"a",
"single",
"usernote",
"from",
"the",
"usernotes",
"."
] | train | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L400-L415 |
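The pop-then-prune behaviour in isolation, on a plain dict shaped like the cached JSON; fully runnable without puni.

```python
cached = {'users': {'some_user': {'ns': ['only note']}}}

cached['users']['some_user']['ns'].pop(0)
if len(cached['users']['some_user']['ns']) == 0:
    del cached['users']['some_user']   # drop users with no notes left

assert 'some_user' not in cached['users']
```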
davidblaisonneau-orange/foreman | foreman/puppetClasses.py | PuppetClasses.load | def load(self):
""" Function load
Get the list of all objects
@return RETURN: A ForemanItem list
"""
cl_tmp = self.api.list(self.objName, limit=self.searchLimit).values()
cl = []
for i in cl_tmp:
cl.extend(i)
return {x[self.index]: ItemPuppetClass(self.api, x['id'],
self.objName, self.payloadObj,
x)
for x in cl} | python | def load(self):
""" Function load
Get the list of all objects
@return RETURN: A ForemanItem list
"""
cl_tmp = self.api.list(self.objName, limit=self.searchLimit).values()
cl = []
for i in cl_tmp:
cl.extend(i)
return {x[self.index]: ItemPuppetClass(self.api, x['id'],
self.objName, self.payloadObj,
x)
for x in cl} | [
"def",
"load",
"(",
"self",
")",
":",
"cl_tmp",
"=",
"self",
".",
"api",
".",
"list",
"(",
"self",
".",
"objName",
",",
"limit",
"=",
"self",
".",
"searchLimit",
")",
".",
"values",
"(",
")",
"cl",
"=",
"[",
"]",
"for",
"i",
"in",
"cl_tmp",
":",
"cl",
".",
"extend",
"(",
"i",
")",
"return",
"{",
"x",
"[",
"self",
".",
"index",
"]",
":",
"ItemPuppetClass",
"(",
"self",
".",
"api",
",",
"x",
"[",
"'id'",
"]",
",",
"self",
".",
"objName",
",",
"self",
".",
"payloadObj",
",",
"x",
")",
"for",
"x",
"in",
"cl",
"}"
] | Function load
Get the list of all objects
@return RETURN: A ForemanItem list | [
"Function",
"load",
"Get",
"the",
"list",
"of",
"all",
"objects"
] | train | https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/puppetClasses.py#L32-L45 |
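The reshaping inside `load()`, flattening a dict of result pages and then keying items by an index field, shown on plain data with `ItemPuppetClass` left out.

```python
pages = {
    'results': [{'name': 'apache', 'id': 1}],
    'more': [{'name': 'ntp', 'id': 2}],
}

flat = []
for chunk in pages.values():
    flat.extend(chunk)                       # concatenate every page

by_name = {x['name']: x for x in flat}       # 'name' stands in for self.index
print(by_name['ntp']['id'])                  # 2
```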
mozilla-services/amo2kinto | amo2kinto/exporter.py | is_related_to | def is_related_to(item, app_id, app_ver=None):
"""Return True if the item relates to the given app_id (and app_ver, if passed)."""
versionRange = item.get('versionRange')
if not versionRange:
return True
for vR in versionRange:
if not vR.get('targetApplication'):
return True
if get_related_targetApplication(vR, app_id, app_ver) is not None:
return True
return False | python | def is_related_to(item, app_id, app_ver=None):
"""Return True if the item relates to the given app_id (and app_ver, if passed)."""
versionRange = item.get('versionRange')
if not versionRange:
return True
for vR in versionRange:
if not vR.get('targetApplication'):
return True
if get_related_targetApplication(vR, app_id, app_ver) is not None:
return True
return False | [
"def",
"is_related_to",
"(",
"item",
",",
"app_id",
",",
"app_ver",
"=",
"None",
")",
":",
"versionRange",
"=",
"item",
".",
"get",
"(",
"'versionRange'",
")",
"if",
"not",
"versionRange",
":",
"return",
"True",
"for",
"vR",
"in",
"versionRange",
":",
"if",
"not",
"vR",
".",
"get",
"(",
"'targetApplication'",
")",
":",
"return",
"True",
"if",
"get_related_targetApplication",
"(",
"vR",
",",
"app_id",
",",
"app_ver",
")",
"is",
"not",
"None",
":",
"return",
"True",
"return",
"False"
] | Return True if the item relates to the given app_id (and app_ver, if passed). | [
"Return",
"True",
"if",
"the",
"item",
"relates",
"to",
"the",
"given",
"app_id",
"(",
"and",
"app_ver",
"if",
"passed",
")",
"."
] | train | https://github.com/mozilla-services/amo2kinto/blob/1ec40647e77cf89badbea4a58d328243daed49a9/amo2kinto/exporter.py#L46-L57 |
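With the function above in scope, the three match paths can be checked directly:

```python
FIREFOX = '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'

item = {'versionRange': [
    {'targetApplication': [{'guid': FIREFOX, 'maxVersion': '*'}]},
]}

assert is_related_to({}, FIREFOX)              # no versionRange: always related
assert is_related_to(item, FIREFOX)            # guid matches
assert not is_related_to(item, '{other-app}')  # guid differs everywhere
```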
mozilla-services/amo2kinto | amo2kinto/exporter.py | get_related_targetApplication | def get_related_targetApplication(vR, app_id, app_ver):
"""Return the first matching target application in this version range.
Returns None if there are no target applications or no matching ones."""
targetApplication = vR.get('targetApplication')
if not targetApplication:
return None
for tA in targetApplication:
guid = tA.get('guid')
if not guid or guid == app_id:
if not app_ver:
return tA
# We purposefully use maxVersion only, so that the blocklist contains items
# whose minimum version is ahead of the version we get passed. This means
# the blocklist we serve is "future-proof" for app upgrades.
if between(version_int(app_ver), '0', tA.get('maxVersion', '*')):
return tA
return None | python | def get_related_targetApplication(vR, app_id, app_ver):
"""Return the first matching target application in this version range.
Returns None if there are no target applications or no matching ones."""
targetApplication = vR.get('targetApplication')
if not targetApplication:
return None
for tA in targetApplication:
guid = tA.get('guid')
if not guid or guid == app_id:
if not app_ver:
return tA
# We purposefully use maxVersion only, so that the blocklist contains items
# whose minimum version is ahead of the version we get passed. This means
# the blocklist we serve is "future-proof" for app upgrades.
if between(version_int(app_ver), '0', tA.get('maxVersion', '*')):
return tA
return None | [
"def",
"get_related_targetApplication",
"(",
"vR",
",",
"app_id",
",",
"app_ver",
")",
":",
"targetApplication",
"=",
"vR",
".",
"get",
"(",
"'targetApplication'",
")",
"if",
"not",
"targetApplication",
":",
"return",
"None",
"for",
"tA",
"in",
"targetApplication",
":",
"guid",
"=",
"tA",
".",
"get",
"(",
"'guid'",
")",
"if",
"not",
"guid",
"or",
"guid",
"==",
"app_id",
":",
"if",
"not",
"app_ver",
":",
"return",
"tA",
"# We purposefully use maxVersion only, so that the blocklist contains items",
"# whose minimum version is ahead of the version we get passed. This means",
"# the blocklist we serve is \"future-proof\" for app upgrades.",
"if",
"between",
"(",
"version_int",
"(",
"app_ver",
")",
",",
"'0'",
",",
"tA",
".",
"get",
"(",
"'maxVersion'",
",",
"'*'",
")",
")",
":",
"return",
"tA",
"return",
"None"
] | Return the first matching target application in this version range.
Returns None if there are no target applications or no matching ones. | [
"Return",
"the",
"first",
"matching",
"target",
"application",
"in",
"this",
"version",
"range",
".",
"Returns",
"None",
"if",
"there",
"are",
"no",
"target",
"applications",
"or",
"no",
"matching",
"ones",
"."
] | train | https://github.com/mozilla-services/amo2kinto/blob/1ec40647e77cf89badbea4a58d328243daed49a9/amo2kinto/exporter.py#L60-L78 |
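A quick check of the maxVersion-only rule, assuming `version_int`/`between` (imported elsewhere in the module) compare dotted versions the way the comment describes:

```python
FIREFOX = '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
vR = {'targetApplication': [{'guid': FIREFOX, 'maxVersion': '56.0'}]}

print(get_related_targetApplication(vR, FIREFOX, '55.0'))  # the matching dict
print(get_related_targetApplication(vR, FIREFOX, '57.0'))  # None: past maxVersion
```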
mozilla-services/amo2kinto | amo2kinto/exporter.py | write_addons_items | def write_addons_items(xml_tree, records, app_id, api_ver=3, app_ver=None):
"""Generate the addons blocklists.
<emItem blockID="i372" id="[email protected]">
<versionRange minVersion="0" maxVersion="*" severity="3">
<targetApplication id="{ec8030f7-c20a-464f-9b0e-13a3a9e97384}">
<versionRange minVersion="39.0a1" maxVersion="*"/>
</targetApplication>
</versionRange>
<prefs>
<pref>browser.startup.homepage</pref>
<pref>browser.search.defaultenginename</pref>
</prefs>
</emItem>
"""
if not records:
return
emItems = etree.SubElement(xml_tree, 'emItems')
groupby = {}
for item in records:
if is_related_to(item, app_id, app_ver):
if item['guid'] in groupby:
emItem = groupby[item['guid']]
# When creating new records from the Kinto Admin we don't have proper blockID.
if 'blockID' in item:
# Remove the first character which is the letter i to
# compare the numeric value i45 < i356.
current_blockID = int(item['blockID'][1:])
previous_blockID = int(emItem.attrib['blockID'][1:])
# Group by and keep the biggest blockID in the XML file.
if current_blockID > previous_blockID:
emItem.attrib['blockID'] = item['blockID']
else:
# If the latest entry does not have any blockID attribute, its
# ID should be used. (the list of records is sorted by ascending
# last_modified).
# See https://bugzilla.mozilla.org/show_bug.cgi?id=1473194
emItem.attrib['blockID'] = item['id']
else:
emItem = etree.SubElement(emItems, 'emItem',
blockID=item.get('blockID', item['id']))
groupby[item['guid']] = emItem
prefs = etree.SubElement(emItem, 'prefs')
for p in item['prefs']:
pref = etree.SubElement(prefs, 'pref')
pref.text = p
# Set the add-on ID
emItem.set('id', item['guid'])
for field in ['name', 'os']:
if field in item:
emItem.set(field, item[field])
build_version_range(emItem, item, app_id) | python | def write_addons_items(xml_tree, records, app_id, api_ver=3, app_ver=None):
"""Generate the addons blocklists.
<emItem blockID="i372" id="[email protected]">
<versionRange minVersion="0" maxVersion="*" severity="3">
<targetApplication id="{ec8030f7-c20a-464f-9b0e-13a3a9e97384}">
<versionRange minVersion="39.0a1" maxVersion="*"/>
</targetApplication>
</versionRange>
<prefs>
<pref>browser.startup.homepage</pref>
<pref>browser.search.defaultenginename</pref>
</prefs>
</emItem>
"""
if not records:
return
emItems = etree.SubElement(xml_tree, 'emItems')
groupby = {}
for item in records:
if is_related_to(item, app_id, app_ver):
if item['guid'] in groupby:
emItem = groupby[item['guid']]
# When creating new records from the Kinto Admin we don't have proper blockID.
if 'blockID' in item:
# Remove the first character which is the letter i to
# compare the numeric value i45 < i356.
current_blockID = int(item['blockID'][1:])
previous_blockID = int(emItem.attrib['blockID'][1:])
# Group by and keep the biggest blockID in the XML file.
if current_blockID > previous_blockID:
emItem.attrib['blockID'] = item['blockID']
else:
# If the latest entry does not have any blockID attribute, its
# ID should be used. (the list of records is sorted by ascending
# last_modified).
# See https://bugzilla.mozilla.org/show_bug.cgi?id=1473194
emItem.attrib['blockID'] = item['id']
else:
emItem = etree.SubElement(emItems, 'emItem',
blockID=item.get('blockID', item['id']))
groupby[item['guid']] = emItem
prefs = etree.SubElement(emItem, 'prefs')
for p in item['prefs']:
pref = etree.SubElement(prefs, 'pref')
pref.text = p
# Set the add-on ID
emItem.set('id', item['guid'])
for field in ['name', 'os']:
if field in item:
emItem.set(field, item[field])
build_version_range(emItem, item, app_id) | [
"def",
"write_addons_items",
"(",
"xml_tree",
",",
"records",
",",
"app_id",
",",
"api_ver",
"=",
"3",
",",
"app_ver",
"=",
"None",
")",
":",
"if",
"not",
"records",
":",
"return",
"emItems",
"=",
"etree",
".",
"SubElement",
"(",
"xml_tree",
",",
"'emItems'",
")",
"groupby",
"=",
"{",
"}",
"for",
"item",
"in",
"records",
":",
"if",
"is_related_to",
"(",
"item",
",",
"app_id",
",",
"app_ver",
")",
":",
"if",
"item",
"[",
"'guid'",
"]",
"in",
"groupby",
":",
"emItem",
"=",
"groupby",
"[",
"item",
"[",
"'guid'",
"]",
"]",
"# When creating new records from the Kinto Admin we don't have proper blockID.",
"if",
"'blockID'",
"in",
"item",
":",
"# Remove the first caracter which is the letter i to",
"# compare the numeric value i45 < i356.",
"current_blockID",
"=",
"int",
"(",
"item",
"[",
"'blockID'",
"]",
"[",
"1",
":",
"]",
")",
"previous_blockID",
"=",
"int",
"(",
"emItem",
".",
"attrib",
"[",
"'blockID'",
"]",
"[",
"1",
":",
"]",
")",
"# Group by and keep the biggest blockID in the XML file.",
"if",
"current_blockID",
">",
"previous_blockID",
":",
"emItem",
".",
"attrib",
"[",
"'blockID'",
"]",
"=",
"item",
"[",
"'blockID'",
"]",
"else",
":",
"# If the latest entry does not have any blockID attribute, its",
"# ID should be used. (the list of records is sorted by ascending",
"# last_modified).",
"# See https://bugzilla.mozilla.org/show_bug.cgi?id=1473194",
"emItem",
".",
"attrib",
"[",
"'blockID'",
"]",
"=",
"item",
"[",
"'id'",
"]",
"else",
":",
"emItem",
"=",
"etree",
".",
"SubElement",
"(",
"emItems",
",",
"'emItem'",
",",
"blockID",
"=",
"item",
".",
"get",
"(",
"'blockID'",
",",
"item",
"[",
"'id'",
"]",
")",
")",
"groupby",
"[",
"item",
"[",
"'guid'",
"]",
"]",
"=",
"emItem",
"prefs",
"=",
"etree",
".",
"SubElement",
"(",
"emItem",
",",
"'prefs'",
")",
"for",
"p",
"in",
"item",
"[",
"'prefs'",
"]",
":",
"pref",
"=",
"etree",
".",
"SubElement",
"(",
"prefs",
",",
"'pref'",
")",
"pref",
".",
"text",
"=",
"p",
"# Set the add-on ID",
"emItem",
".",
"set",
"(",
"'id'",
",",
"item",
"[",
"'guid'",
"]",
")",
"for",
"field",
"in",
"[",
"'name'",
",",
"'os'",
"]",
":",
"if",
"field",
"in",
"item",
":",
"emItem",
".",
"set",
"(",
"field",
",",
"item",
"[",
"field",
"]",
")",
"build_version_range",
"(",
"emItem",
",",
"item",
",",
"app_id",
")"
] | Generate the addons blocklists.
<emItem blockID="i372" id="[email protected]">
<versionRange minVersion="0" maxVersion="*" severity="3">
<targetApplication id="{ec8030f7-c20a-464f-9b0e-13a3a9e97384}">
<versionRange minVersion="39.0a1" maxVersion="*"/>
</targetApplication>
</versionRange>
<prefs>
<pref>browser.startup.homepage</pref>
<pref>browser.search.defaultenginename</pref>
</prefs>
</emItem> | [
"Generate",
"the",
"addons",
"blocklists",
"."
] | train | https://github.com/mozilla-services/amo2kinto/blob/1ec40647e77cf89badbea4a58d328243daed49a9/amo2kinto/exporter.py#L88-L143 |
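A minimal end-to-end sketch. It assumes the module's `etree` is `lxml.etree` and that `build_version_range` (defined elsewhere in the module) tolerates a record without a `versionRange`:

```python
from lxml import etree  # assumed to match the module's etree import

FIREFOX = '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
root = etree.Element('blocklist')
records = [{
    'id': 'rec-1',
    'blockID': 'i372',
    'guid': '[email protected]',
    'prefs': ['browser.startup.homepage'],
}]

write_addons_items(root, records, app_id=FIREFOX)
print(etree.tostring(root, pretty_print=True).decode())
```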
mozilla-services/amo2kinto | amo2kinto/exporter.py | write_plugin_items | def write_plugin_items(xml_tree, records, app_id, api_ver=3, app_ver=None):
"""Generate the plugin blocklists.
<pluginItem blockID="p422">
<match name="filename" exp="JavaAppletPlugin\\.plugin"/>
<versionRange minVersion="Java 7 Update 16"
maxVersion="Java 7 Update 24"
severity="0" vulnerabilitystatus="1">
<targetApplication id="{ec8030f7-c20a-464f-9b0e-13a3a9e97384}">
<versionRange minVersion="17.0" maxVersion="*"/>
</targetApplication>
</versionRange>
</pluginItem>
"""
if not records:
return
pluginItems = etree.SubElement(xml_tree, 'pluginItems')
for item in records:
for versionRange in item.get('versionRange', []):
if not versionRange.get('targetApplication'):
add_plugin_item(pluginItems, item, versionRange,
app_id=app_id, api_ver=api_ver,
app_ver=app_ver)
else:
targetApplication = get_related_targetApplication(versionRange, app_id, app_ver)
if targetApplication is not None:
add_plugin_item(pluginItems, item, versionRange, targetApplication,
app_id=app_id, api_ver=api_ver,
app_ver=app_ver) | python | def write_plugin_items(xml_tree, records, app_id, api_ver=3, app_ver=None):
"""Generate the plugin blocklists.
<pluginItem blockID="p422">
<match name="filename" exp="JavaAppletPlugin\\.plugin"/>
<versionRange minVersion="Java 7 Update 16"
maxVersion="Java 7 Update 24"
severity="0" vulnerabilitystatus="1">
<targetApplication id="{ec8030f7-c20a-464f-9b0e-13a3a9e97384}">
<versionRange minVersion="17.0" maxVersion="*"/>
</targetApplication>
</versionRange>
</pluginItem>
"""
if not records:
return
pluginItems = etree.SubElement(xml_tree, 'pluginItems')
for item in records:
for versionRange in item.get('versionRange', []):
if not versionRange.get('targetApplication'):
add_plugin_item(pluginItems, item, versionRange,
app_id=app_id, api_ver=api_ver,
app_ver=app_ver)
else:
targetApplication = get_related_targetApplication(versionRange, app_id, app_ver)
if targetApplication is not None:
add_plugin_item(pluginItems, item, versionRange, targetApplication,
app_id=app_id, api_ver=api_ver,
app_ver=app_ver) | [
"def",
"write_plugin_items",
"(",
"xml_tree",
",",
"records",
",",
"app_id",
",",
"api_ver",
"=",
"3",
",",
"app_ver",
"=",
"None",
")",
":",
"if",
"not",
"records",
":",
"return",
"pluginItems",
"=",
"etree",
".",
"SubElement",
"(",
"xml_tree",
",",
"'pluginItems'",
")",
"for",
"item",
"in",
"records",
":",
"for",
"versionRange",
"in",
"item",
".",
"get",
"(",
"'versionRange'",
",",
"[",
"]",
")",
":",
"if",
"not",
"versionRange",
".",
"get",
"(",
"'targetApplication'",
")",
":",
"add_plugin_item",
"(",
"pluginItems",
",",
"item",
",",
"versionRange",
",",
"app_id",
"=",
"app_id",
",",
"api_ver",
"=",
"api_ver",
",",
"app_ver",
"=",
"app_ver",
")",
"else",
":",
"targetApplication",
"=",
"get_related_targetApplication",
"(",
"versionRange",
",",
"app_id",
",",
"app_ver",
")",
"if",
"targetApplication",
"is",
"not",
"None",
":",
"add_plugin_item",
"(",
"pluginItems",
",",
"item",
",",
"versionRange",
",",
"targetApplication",
",",
"app_id",
"=",
"app_id",
",",
"api_ver",
"=",
"api_ver",
",",
"app_ver",
"=",
"app_ver",
")"
] | Generate the plugin blocklists.
<pluginItem blockID="p422">
<match name="filename" exp="JavaAppletPlugin\\.plugin"/>
<versionRange minVersion="Java 7 Update 16"
maxVersion="Java 7 Update 24"
severity="0" vulnerabilitystatus="1">
<targetApplication id="{ec8030f7-c20a-464f-9b0e-13a3a9e97384}">
<versionRange minVersion="17.0" maxVersion="*"/>
</targetApplication>
</versionRange>
</pluginItem> | [
"Generate",
"the",
"plugin",
"blocklists",
"."
] | train | https://github.com/mozilla-services/amo2kinto/blob/1ec40647e77cf89badbea4a58d328243daed49a9/amo2kinto/exporter.py#L152-L182 |
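The dispatch rule in isolation: one emitted entry per `versionRange`, gated on target matching only when targets exist, without touching `add_plugin_item`.

```python
def emitted_count(item, app_id, app_ver=None):
    # Mirrors the branching above, counting instead of emitting XML.
    count = 0
    for vR in item.get('versionRange', []):
        if not vR.get('targetApplication'):
            count += 1                       # unconditional entry
        elif get_related_targetApplication(vR, app_id, app_ver) is not None:
            count += 1                       # entry for the matching target
    return count

print(emitted_count({'versionRange': [{}]}, '{any-app}'))  # 1
```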
mozilla-services/amo2kinto | amo2kinto/exporter.py | write_gfx_items | def write_gfx_items(xml_tree, records, app_id, api_ver=3):
"""Generate the gfxBlacklistEntry.
<gfxBlacklistEntry blockID="g35">
<os>WINNT 6.1</os>
<vendor>0x10de</vendor>
<devices>
<device>0x0a6c</device>
</devices>
<feature>DIRECT2D</feature>
<featureStatus>BLOCKED_DRIVER_VERSION</featureStatus>
<driverVersion>8.17.12.5896</driverVersion>
<driverVersionComparator>LESS_THAN_OR_EQUAL</driverVersionComparator>
<versionRange minVersion="3.2" maxVersion="3.4" />
</gfxBlacklistEntry>
"""
if not records:
return
gfxItems = etree.SubElement(xml_tree, 'gfxItems')
for item in records:
is_record_related = ('guid' not in item or item['guid'] == app_id)
if is_record_related:
entry = etree.SubElement(gfxItems, 'gfxBlacklistEntry',
blockID=item.get('blockID', item['id']))
fields = ['os', 'vendor', 'feature', 'featureStatus',
'driverVersion', 'driverVersionComparator']
for field in fields:
if field in item:
node = etree.SubElement(entry, field)
node.text = item[field]
# Devices
if item['devices']:
devices = etree.SubElement(entry, 'devices')
for d in item['devices']:
device = etree.SubElement(devices, 'device')
device.text = d
if 'versionRange' in item:
version = item['versionRange']
versionRange = etree.SubElement(entry, 'versionRange')
for field in ['minVersion', 'maxVersion']:
value = version.get(field)
if value:
versionRange.set(field, str(value)) | python | def write_gfx_items(xml_tree, records, app_id, api_ver=3):
"""Generate the gfxBlacklistEntry.
<gfxBlacklistEntry blockID="g35">
<os>WINNT 6.1</os>
<vendor>0x10de</vendor>
<devices>
<device>0x0a6c</device>
</devices>
<feature>DIRECT2D</feature>
<featureStatus>BLOCKED_DRIVER_VERSION</featureStatus>
<driverVersion>8.17.12.5896</driverVersion>
<driverVersionComparator>LESS_THAN_OR_EQUAL</driverVersionComparator>
<versionRange minVersion="3.2" maxVersion="3.4" />
</gfxBlacklistEntry>
"""
if not records:
return
gfxItems = etree.SubElement(xml_tree, 'gfxItems')
for item in records:
is_record_related = ('guid' not in item or item['guid'] == app_id)
if is_record_related:
entry = etree.SubElement(gfxItems, 'gfxBlacklistEntry',
blockID=item.get('blockID', item['id']))
fields = ['os', 'vendor', 'feature', 'featureStatus',
'driverVersion', 'driverVersionComparator']
for field in fields:
if field in item:
node = etree.SubElement(entry, field)
node.text = item[field]
# Devices
if item['devices']:
devices = etree.SubElement(entry, 'devices')
for d in item['devices']:
device = etree.SubElement(devices, 'device')
device.text = d
if 'versionRange' in item:
version = item['versionRange']
versionRange = etree.SubElement(entry, 'versionRange')
for field in ['minVersion', 'maxVersion']:
value = version.get(field)
if value:
versionRange.set(field, str(value)) | [
"def",
"write_gfx_items",
"(",
"xml_tree",
",",
"records",
",",
"app_id",
",",
"api_ver",
"=",
"3",
")",
":",
"if",
"not",
"records",
":",
"return",
"gfxItems",
"=",
"etree",
".",
"SubElement",
"(",
"xml_tree",
",",
"'gfxItems'",
")",
"for",
"item",
"in",
"records",
":",
"is_record_related",
"=",
"(",
"'guid'",
"not",
"in",
"item",
"or",
"item",
"[",
"'guid'",
"]",
"==",
"app_id",
")",
"if",
"is_record_related",
":",
"entry",
"=",
"etree",
".",
"SubElement",
"(",
"gfxItems",
",",
"'gfxBlacklistEntry'",
",",
"blockID",
"=",
"item",
".",
"get",
"(",
"'blockID'",
",",
"item",
"[",
"'id'",
"]",
")",
")",
"fields",
"=",
"[",
"'os'",
",",
"'vendor'",
",",
"'feature'",
",",
"'featureStatus'",
",",
"'driverVersion'",
",",
"'driverVersionComparator'",
"]",
"for",
"field",
"in",
"fields",
":",
"if",
"field",
"in",
"item",
":",
"node",
"=",
"etree",
".",
"SubElement",
"(",
"entry",
",",
"field",
")",
"node",
".",
"text",
"=",
"item",
"[",
"field",
"]",
"# Devices",
"if",
"item",
"[",
"'devices'",
"]",
":",
"devices",
"=",
"etree",
".",
"SubElement",
"(",
"entry",
",",
"'devices'",
")",
"for",
"d",
"in",
"item",
"[",
"'devices'",
"]",
":",
"device",
"=",
"etree",
".",
"SubElement",
"(",
"devices",
",",
"'device'",
")",
"device",
".",
"text",
"=",
"d",
"if",
"'versionRange'",
"in",
"item",
":",
"version",
"=",
"item",
"[",
"'versionRange'",
"]",
"versionRange",
"=",
"etree",
".",
"SubElement",
"(",
"entry",
",",
"'versionRange'",
")",
"for",
"field",
"in",
"[",
"'minVersion'",
",",
"'maxVersion'",
"]",
":",
"value",
"=",
"version",
".",
"get",
"(",
"field",
")",
"if",
"value",
":",
"versionRange",
".",
"set",
"(",
"field",
",",
"str",
"(",
"value",
")",
")"
] | Generate the gfxBlacklistEntry.
<gfxBlacklistEntry blockID="g35">
<os>WINNT 6.1</os>
<vendor>0x10de</vendor>
<devices>
<device>0x0a6c</device>
</devices>
<feature>DIRECT2D</feature>
<featureStatus>BLOCKED_DRIVER_VERSION</featureStatus>
<driverVersion>8.17.12.5896</driverVersion>
<driverVersionComparator>LESS_THAN_OR_EQUAL</driverVersionComparator>
<versionRange minVersion="3.2" maxVersion="3.4" />
</gfxBlacklistEntry> | [
"Generate",
"the",
"gfxBlacklistEntry",
"."
] | train | https://github.com/mozilla-services/amo2kinto/blob/1ec40647e77cf89badbea4a58d328243daed49a9/amo2kinto/exporter.py#L276-L323 |
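A runnable sketch (assuming `lxml.etree`) with a record that exercises every optional branch: the scalar fields, the device list, and the version range.

```python
from lxml import etree  # assumed to match the module's etree import

FIREFOX = '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
root = etree.Element('blocklist')
records = [{
    'id': 'rec-g35',
    'blockID': 'g35',
    'os': 'WINNT 6.1',
    'vendor': '0x10de',
    'devices': ['0x0a6c'],
    'feature': 'DIRECT2D',
    'versionRange': {'minVersion': '3.2', 'maxVersion': '3.4'},
}]

write_gfx_items(root, records, app_id=FIREFOX)
print(etree.tostring(root, pretty_print=True).decode())
```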
mozilla-services/amo2kinto | amo2kinto/exporter.py | write_cert_items | def write_cert_items(xml_tree, records, api_ver=3, app_id=None, app_ver=None):
"""Generate the certificate blocklists.
<certItem issuerName="MIGQMQswCQYD...IENB">
<serialNumber>UoRGnb96CUDTxIqVry6LBg==</serialNumber>
</certItem>
or
<certItem subject='MCIxIDAeBgNVBAMMF0Fub3RoZXIgVGVzdCBFbmQtZW50aXR5'
pubKeyHash='VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8='>
</certItem>
"""
if not records or not should_include_certs(app_id, app_ver):
return
certItems = etree.SubElement(xml_tree, 'certItems')
for item in records:
if item.get('subject') and item.get('pubKeyHash'):
cert = etree.SubElement(certItems, 'certItem',
subject=item['subject'],
pubKeyHash=item['pubKeyHash'])
else:
cert = etree.SubElement(certItems, 'certItem',
issuerName=item['issuerName'])
serialNumber = etree.SubElement(cert, 'serialNumber')
serialNumber.text = item['serialNumber'] | python | def write_cert_items(xml_tree, records, api_ver=3, app_id=None, app_ver=None):
"""Generate the certificate blocklists.
<certItem issuerName="MIGQMQswCQYD...IENB">
<serialNumber>UoRGnb96CUDTxIqVry6LBg==</serialNumber>
</certItem>
or
<certItem subject='MCIxIDAeBgNVBAMMF0Fub3RoZXIgVGVzdCBFbmQtZW50aXR5'
pubKeyHash='VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8='>
</certItem>
"""
if not records or not should_include_certs(app_id, app_ver):
return
certItems = etree.SubElement(xml_tree, 'certItems')
for item in records:
if item.get('subject') and item.get('pubKeyHash'):
cert = etree.SubElement(certItems, 'certItem',
subject=item['subject'],
pubKeyHash=item['pubKeyHash'])
else:
cert = etree.SubElement(certItems, 'certItem',
issuerName=item['issuerName'])
serialNumber = etree.SubElement(cert, 'serialNumber')
serialNumber.text = item['serialNumber'] | [
"def",
"write_cert_items",
"(",
"xml_tree",
",",
"records",
",",
"api_ver",
"=",
"3",
",",
"app_id",
"=",
"None",
",",
"app_ver",
"=",
"None",
")",
":",
"if",
"not",
"records",
"or",
"not",
"should_include_certs",
"(",
"app_id",
",",
"app_ver",
")",
":",
"return",
"certItems",
"=",
"etree",
".",
"SubElement",
"(",
"xml_tree",
",",
"'certItems'",
")",
"for",
"item",
"in",
"records",
":",
"if",
"item",
".",
"get",
"(",
"'subject'",
")",
"and",
"item",
".",
"get",
"(",
"'pubKeyHash'",
")",
":",
"cert",
"=",
"etree",
".",
"SubElement",
"(",
"certItems",
",",
"'certItem'",
",",
"subject",
"=",
"item",
"[",
"'subject'",
"]",
",",
"pubKeyHash",
"=",
"item",
"[",
"'pubKeyHash'",
"]",
")",
"else",
":",
"cert",
"=",
"etree",
".",
"SubElement",
"(",
"certItems",
",",
"'certItem'",
",",
"issuerName",
"=",
"item",
"[",
"'issuerName'",
"]",
")",
"serialNumber",
"=",
"etree",
".",
"SubElement",
"(",
"cert",
",",
"'serialNumber'",
")",
"serialNumber",
".",
"text",
"=",
"item",
"[",
"'serialNumber'",
"]"
] | Generate the certificate blocklists.
<certItem issuerName="MIGQMQswCQYD...IENB">
<serialNumber>UoRGnb96CUDTxIqVry6LBg==</serialNumber>
</certItem>
or
<certItem subject='MCIxIDAeBgNVBAMMF0Fub3RoZXIgVGVzdCBFbmQtZW50aXR5'
pubKeyHash='VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8='>
</certItem> | [
"Generate",
"the",
"certificate",
"blocklists",
"."
] | train | https://github.com/mozilla-services/amo2kinto/blob/1ec40647e77cf89badbea4a58d328243daed49a9/amo2kinto/exporter.py#L326-L352 |
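A sketch of the issuer/serial form. `should_include_certs` is defined elsewhere in the module; whether it admits `app_id=None` is an assumption here.

```python
from lxml import etree  # assumed to match the module's etree import

root = etree.Element('blocklist')
records = [{
    'issuerName': 'MIGQMQswCQYD...IENB',        # truncated sample value
    'serialNumber': 'UoRGnb96CUDTxIqVry6LBg==',
}]

write_cert_items(root, records)  # assumes should_include_certs(None, None) is truthy
print(etree.tostring(root, pretty_print=True).decode())
```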
quantmind/agile-toolkit | agiletoolkit/api/repo.py | GitRepo.label | def label(self, name, color, update=True):
"""Create or update a label
"""
url = '%s/labels' % self
data = dict(name=name, color=color)
response = self.http.post(
url, json=data, auth=self.auth, headers=self.headers
)
if response.status_code == 201:
return True
elif response.status_code == 422 and update:
url = '%s/%s' % (url, name)
response = self.http.patch(
url, json=data, auth=self.auth, headers=self.headers
)
response.raise_for_status()
return False | python | def label(self, name, color, update=True):
"""Create or update a label
"""
url = '%s/labels' % self
data = dict(name=name, color=color)
response = self.http.post(
url, json=data, auth=self.auth, headers=self.headers
)
if response.status_code == 201:
return True
elif response.status_code == 422 and update:
url = '%s/%s' % (url, name)
response = self.http.patch(
url, json=data, auth=self.auth, headers=self.headers
)
response.raise_for_status()
return False | [
"def",
"label",
"(",
"self",
",",
"name",
",",
"color",
",",
"update",
"=",
"True",
")",
":",
"url",
"=",
"'%s/labels'",
"%",
"self",
"data",
"=",
"dict",
"(",
"name",
"=",
"name",
",",
"color",
"=",
"color",
")",
"response",
"=",
"self",
".",
"http",
".",
"post",
"(",
"url",
",",
"json",
"=",
"data",
",",
"auth",
"=",
"self",
".",
"auth",
",",
"headers",
"=",
"self",
".",
"headers",
")",
"if",
"response",
".",
"status_code",
"==",
"201",
":",
"return",
"True",
"elif",
"response",
".",
"status_code",
"==",
"422",
"and",
"update",
":",
"url",
"=",
"'%s/%s'",
"%",
"(",
"url",
",",
"name",
")",
"response",
"=",
"self",
".",
"http",
".",
"patch",
"(",
"url",
",",
"json",
"=",
"data",
",",
"auth",
"=",
"self",
".",
"auth",
",",
"headers",
"=",
"self",
".",
"headers",
")",
"response",
".",
"raise_for_status",
"(",
")",
"return",
"False"
] | Create or update a label | [
"Create",
"or",
"update",
"a",
"label"
] | train | https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/api/repo.py#L25-L41 |
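The same create-or-update flow expressed directly against the GitHub REST API with `requests`; token and repository names are placeholders, and this bypasses the toolkit's `GitRepo` wiring.

```python
import requests

def ensure_label(owner, repo, name, color, token):
    base = 'https://api.github.com/repos/{}/{}/labels'.format(owner, repo)
    headers = {'Authorization': 'token ' + token}
    r = requests.post(base, json={'name': name, 'color': color}, headers=headers)
    if r.status_code == 201:
        return True                       # label created
    if r.status_code == 422:              # already exists: update in place
        r = requests.patch('{}/{}'.format(base, name),
                           json={'name': name, 'color': color}, headers=headers)
        r.raise_for_status()
        return False
    r.raise_for_status()

# ensure_label('octocat', 'hello-world', 'bug', 'ee0701', token='...')
```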
aroberge/experimental | experimental/transformers/utils/one2one.py | translate | def translate(source, dictionary):
'''A dictionary with a one-to-one translation of keywords is used
to provide the transformation.
'''
toks = tokenize.generate_tokens(StringIO(source).readline)
result = []
for toktype, tokvalue, _, _, _ in toks:
if toktype == tokenize.NAME and tokvalue in dictionary:
result.append((toktype, dictionary[tokvalue]))
else:
result.append((toktype, tokvalue))
return tokenize.untokenize(result) | python | def translate(source, dictionary):
'''A dictionary with a one-to-one translation of keywords is used
to provide the transformation.
'''
toks = tokenize.generate_tokens(StringIO(source).readline)
result = []
for toktype, tokvalue, _, _, _ in toks:
if toktype == tokenize.NAME and tokvalue in dictionary:
result.append((toktype, dictionary[tokvalue]))
else:
result.append((toktype, tokvalue))
return tokenize.untokenize(result) | [
"def",
"translate",
"(",
"source",
",",
"dictionary",
")",
":",
"toks",
"=",
"tokenize",
".",
"generate_tokens",
"(",
"StringIO",
"(",
"source",
")",
".",
"readline",
")",
"result",
"=",
"[",
"]",
"for",
"toktype",
",",
"tokvalue",
",",
"_",
",",
"_",
",",
"_",
"in",
"toks",
":",
"if",
"toktype",
"==",
"tokenize",
".",
"NAME",
"and",
"tokvalue",
"in",
"dictionary",
":",
"result",
".",
"append",
"(",
"(",
"toktype",
",",
"dictionary",
"[",
"tokvalue",
"]",
")",
")",
"else",
":",
"result",
".",
"append",
"(",
"(",
"toktype",
",",
"tokvalue",
")",
")",
"return",
"tokenize",
".",
"untokenize",
"(",
"result",
")"
] | A dictionary with a one-to-one translation of keywords is used
to provide the transformation. | [
"A",
"dictionary",
"with",
"a",
"one",
"-",
"to",
"-",
"one",
"translation",
"of",
"keywords",
"is",
"used",
"to",
"provide",
"the",
"transformation",
"."
] | train | https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/transformers/utils/one2one.py#L12-L23 |
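With `translate` in scope (it needs `tokenize` and `io.StringIO`, as imported by the module), a keyword dialect is just a plain dict:

```python
french = {'si': 'if', 'sinon': 'else', 'afficher': 'print'}
source = (
    "si x > 0:\n"
    "    afficher(x)\n"
    "sinon:\n"
    "    afficher(-x)\n"
)
print(translate(source, french))   # plain Python comes back out
```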
davidblaisonneau-orange/foreman | foreman/itemComputeRessource.py | ItemComputeRessource.enhance | def enhance(self):
""" Function enhance
Enhance the object with new item or enhanced items
"""
self.update({'images':
SubDict(self.api, self.objName,
self.payloadObj, self.key,
SubItemImages)}) | python | def enhance(self):
""" Function enhance
Enhance the object with new item or enhanced items
"""
self.update({'images':
SubDict(self.api, self.objName,
self.payloadObj, self.key,
SubItemImages)}) | [
"def",
"enhance",
"(",
"self",
")",
":",
"self",
".",
"update",
"(",
"{",
"'images'",
":",
"SubDict",
"(",
"self",
".",
"api",
",",
"self",
".",
"objName",
",",
"self",
".",
"payloadObj",
",",
"self",
".",
"key",
",",
"SubItemImages",
")",
"}",
")"
] | Function enhance
Enhance the object with new item or enhanced items | [
"Function",
"enhance",
"Enhance",
"the",
"object",
"with",
"new",
"item",
"or",
"enhanced",
"items"
] | train | https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/itemComputeRessource.py#L34-L41 |
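The effect of `enhance()` on the item, sketched with a plain dict; `SubDict`/`SubItemImages` construction is foreman-internal and omitted here.

```python
item = {'name': 'my-compute-resource'}
item.update({'images': {}})   # stands in for SubDict(api, ..., SubItemImages)
print('images' in item)       # True: the item now exposes an images sub-collection
```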
MatterMiners/cobald | cobald/daemon/runners/asyncio_runner.py | AsyncioRunner._run_payloads | async def _run_payloads(self):
"""Async component of _run"""
delay = 0.0
try:
while self.running.is_set():
await self._start_payloads()
await self._reap_payloads()
await asyncio.sleep(delay)
delay = min(delay + 0.1, 1.0)
except Exception:
await self._cancel_payloads()
raise | python | async def _run_payloads(self):
"""Async component of _run"""
delay = 0.0
try:
while self.running.is_set():
await self._start_payloads()
await self._reap_payloads()
await asyncio.sleep(delay)
delay = min(delay + 0.1, 1.0)
except Exception:
await self._cancel_payloads()
raise | [
"async",
"def",
"_run_payloads",
"(",
"self",
")",
":",
"delay",
"=",
"0.0",
"try",
":",
"while",
"self",
".",
"running",
".",
"is_set",
"(",
")",
":",
"await",
"self",
".",
"_start_payloads",
"(",
")",
"await",
"self",
".",
"_reap_payloads",
"(",
")",
"await",
"asyncio",
".",
"sleep",
"(",
"delay",
")",
"delay",
"=",
"min",
"(",
"delay",
"+",
"0.1",
",",
"1.0",
")",
"except",
"Exception",
":",
"await",
"self",
".",
"_cancel_payloads",
"(",
")",
"raise"
] | Async component of _run | [
"Async",
"component",
"of",
"_run"
] | train | https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/asyncio_runner.py#L29-L40 |
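The poll-with-backoff skeleton runs standalone; the `step` coroutine stands in for starting and reaping payloads.

```python
import asyncio
import threading

running = threading.Event()
running.set()
sweeps = 0

async def step():
    global sweeps
    sweeps += 1
    if sweeps >= 3:
        running.clear()                 # stop after three sweeps

async def poll():
    delay = 0.0
    while running.is_set():
        await step()
        await asyncio.sleep(delay)
        delay = min(delay + 0.1, 1.0)   # idle back-off capped at 1s

asyncio.run(poll())
print(sweeps)   # 3
```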
MatterMiners/cobald | cobald/daemon/runners/asyncio_runner.py | AsyncioRunner._start_payloads | async def _start_payloads(self):
"""Start all queued payloads"""
with self._lock:
for coroutine in self._payloads:
task = self.event_loop.create_task(coroutine())
self._tasks.add(task)
self._payloads.clear()
await asyncio.sleep(0) | python | async def _start_payloads(self):
"""Start all queued payloads"""
with self._lock:
for coroutine in self._payloads:
task = self.event_loop.create_task(coroutine())
self._tasks.add(task)
self._payloads.clear()
await asyncio.sleep(0) | [
"async",
"def",
"_start_payloads",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"for",
"coroutine",
"in",
"self",
".",
"_payloads",
":",
"task",
"=",
"self",
".",
"event_loop",
".",
"create_task",
"(",
"coroutine",
"(",
")",
")",
"self",
".",
"_tasks",
".",
"add",
"(",
"task",
")",
"self",
".",
"_payloads",
".",
"clear",
"(",
")",
"await",
"asyncio",
".",
"sleep",
"(",
"0",
")"
] | Start all queued payloads | [
"Start",
"all",
"queued",
"payloads"
] | train | https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/asyncio_runner.py#L42-L49 |
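Draining queued coroutine factories into tasks, as above, minus the lock:

```python
import asyncio

async def main():
    payloads = [lambda: asyncio.sleep(0) for _ in range(3)]   # queued factories
    tasks = set()
    for coroutine in payloads:
        tasks.add(asyncio.get_running_loop().create_task(coroutine()))
    payloads.clear()
    await asyncio.gather(*tasks)
    print(len(tasks))   # 3

asyncio.run(main())
```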
MatterMiners/cobald | cobald/daemon/runners/asyncio_runner.py | AsyncioRunner._reap_payloads | async def _reap_payloads(self):
"""Clean up all finished payloads"""
for task in self._tasks.copy():
if task.done():
self._tasks.remove(task)
if task.exception() is not None:
raise task.exception()
await asyncio.sleep(0) | python | async def _reap_payloads(self):
"""Clean up all finished payloads"""
for task in self._tasks.copy():
if task.done():
self._tasks.remove(task)
if task.exception() is not None:
raise task.exception()
await asyncio.sleep(0) | [
"async",
"def",
"_reap_payloads",
"(",
"self",
")",
":",
"for",
"task",
"in",
"self",
".",
"_tasks",
".",
"copy",
"(",
")",
":",
"if",
"task",
".",
"done",
"(",
")",
":",
"self",
".",
"_tasks",
".",
"remove",
"(",
"task",
")",
"if",
"task",
".",
"exception",
"(",
")",
"is",
"not",
"None",
":",
"raise",
"task",
".",
"exception",
"(",
")",
"await",
"asyncio",
".",
"sleep",
"(",
"0",
")"
] | Clean up all finished payloads | [
"Clean",
"up",
"all",
"finished",
"payloads"
] | train | https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/asyncio_runner.py#L51-L58 |
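Reaping in isolation: finished tasks are removed from the set and a stored exception is re-raised.

```python
import asyncio

async def payload():
    raise ValueError('payload failed')

async def main():
    tasks = {asyncio.get_running_loop().create_task(payload())}
    await asyncio.sleep(0)              # one yield lets the task finish
    for task in tasks.copy():
        if task.done():
            tasks.remove(task)
            if task.exception() is not None:
                raise task.exception()

try:
    asyncio.run(main())
except ValueError as exc:
    print(exc)   # payload failed
```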
MatterMiners/cobald | cobald/daemon/runners/asyncio_runner.py | AsyncioRunner._cancel_payloads | async def _cancel_payloads(self):
"""Cancel all remaining payloads"""
for task in self._tasks:
task.cancel()
await asyncio.sleep(0)
for task in self._tasks:
while not task.done():
await asyncio.sleep(0.1)
task.cancel() | python | async def _cancel_payloads(self):
"""Cancel all remaining payloads"""
for task in self._tasks:
task.cancel()
await asyncio.sleep(0)
for task in self._tasks:
while not task.done():
await asyncio.sleep(0.1)
task.cancel() | [
"async",
"def",
"_cancel_payloads",
"(",
"self",
")",
":",
"for",
"task",
"in",
"self",
".",
"_tasks",
":",
"task",
".",
"cancel",
"(",
")",
"await",
"asyncio",
".",
"sleep",
"(",
"0",
")",
"for",
"task",
"in",
"self",
".",
"_tasks",
":",
"while",
"not",
"task",
".",
"done",
"(",
")",
":",
"await",
"asyncio",
".",
"sleep",
"(",
"0.1",
")",
"task",
".",
"cancel",
"(",
")"
] | Cancel all remaining payloads | [
"Cancel",
"all",
"remaining",
"payloads"
] | train | https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/asyncio_runner.py#L60-L68 |
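Cooperative cancellation, standalone; the 0.1s poll mirrors the method above.

```python
import asyncio

async def forever():
    await asyncio.sleep(3600)

async def main():
    tasks = {asyncio.get_running_loop().create_task(forever())}
    for task in tasks:
        task.cancel()                   # request cancellation
    await asyncio.sleep(0)
    for task in tasks:
        while not task.done():          # wait for acknowledgement
            await asyncio.sleep(0.1)
    print(all(t.cancelled() for t in tasks))   # True

asyncio.run(main())
```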
Karaage-Cluster/python-tldap | tldap/ldap_passwd.py | check_password | def check_password(password: str, encrypted: str) -> bool:
""" Check a plaintext password against a hashed password. """
# some old passwords have {crypt} in lower case, and passlib wants it to be
# in upper case.
if encrypted.startswith("{crypt}"):
encrypted = "{CRYPT}" + encrypted[7:]
return pwd_context.verify(password, encrypted) | python | def check_password(password: str, encrypted: str) -> bool:
""" Check a plaintext password against a hashed password. """
# some old passwords have {crypt} in lower case, and passlib wants it to be
# in upper case.
if encrypted.startswith("{crypt}"):
encrypted = "{CRYPT}" + encrypted[7:]
return pwd_context.verify(password, encrypted) | [
"def",
"check_password",
"(",
"password",
":",
"str",
",",
"encrypted",
":",
"str",
")",
"->",
"bool",
":",
"# some old passwords have {crypt} in lower case, and passlib wants it to be",
"# in upper case.",
"if",
"encrypted",
".",
"startswith",
"(",
"\"{crypt}\"",
")",
":",
"encrypted",
"=",
"\"{CRYPT}\"",
"+",
"encrypted",
"[",
"7",
":",
"]",
"return",
"pwd_context",
".",
"verify",
"(",
"password",
",",
"encrypted",
")"
] | Check a plaintext password against a hashed password. | [
"Check",
"a",
"plaintext",
"password",
"against",
"a",
"hashed",
"password",
"."
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/ldap_passwd.py#L36-L42 |
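The verification that `pwd_context.verify` performs, demonstrated with a locally built `passlib` context; tldap's own configured scheme list is not shown in this record.

```python
from passlib.context import CryptContext

ctx = CryptContext(schemes=['ldap_salted_sha1'])
stored = ctx.hash('hunter2')            # e.g. '{SSHA}...'
print(ctx.verify('hunter2', stored))    # True
print(ctx.verify('wrong', stored))      # False
```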
quantmind/agile-toolkit | agiletoolkit/github/validate.py | validate | def validate(ctx, sandbox):
"""Check if version of repository is semantic
"""
m = RepoManager(ctx.obj['agile'])
if not sandbox or m.can_release('sandbox'):
click.echo(m.validate_version()) | python | def validate(ctx, sandbox):
"""Check if version of repository is semantic
"""
m = RepoManager(ctx.obj['agile'])
if not sandbox or m.can_release('sandbox'):
click.echo(m.validate_version()) | [
"def",
"validate",
"(",
"ctx",
",",
"sandbox",
")",
":",
"m",
"=",
"RepoManager",
"(",
"ctx",
".",
"obj",
"[",
"'agile'",
"]",
")",
"if",
"not",
"sandbox",
"or",
"m",
".",
"can_release",
"(",
"'sandbox'",
")",
":",
"click",
".",
"echo",
"(",
"m",
".",
"validate_version",
"(",
")",
")"
] | Check if version of repository is semantic | [
"Check",
"if",
"version",
"of",
"repository",
"is",
"semantic"
] | train | https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/github/validate.py#L11-L16 |
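A hypothetical in-process invocation with click's test runner. The `{'agile': ...}` context layout is inferred from the body above, and the command's option decorators are not shown in this record.

```python
from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(validate, [], obj={'agile': {}})   # config dict assumed
print(result.output or result.exception)
```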
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper.reset | def reset(self, force_flush_cache: bool = False) -> None:
"""
Reset transaction back to original state, discarding all
uncompleted transactions.
"""
super(LDAPwrapper, self).reset()
if len(self._transactions) == 0:
raise RuntimeError("reset called outside a transaction.")
self._transactions[-1] = [] | python | def reset(self, force_flush_cache: bool = False) -> None:
"""
Reset transaction back to original state, discarding all
uncompleted transactions.
"""
super(LDAPwrapper, self).reset()
if len(self._transactions) == 0:
raise RuntimeError("reset called outside a transaction.")
self._transactions[-1] = [] | [
"def",
"reset",
"(",
"self",
",",
"force_flush_cache",
":",
"bool",
"=",
"False",
")",
"->",
"None",
":",
"super",
"(",
"LDAPwrapper",
",",
"self",
")",
".",
"reset",
"(",
")",
"if",
"len",
"(",
"self",
".",
"_transactions",
")",
"==",
"0",
":",
"raise",
"RuntimeError",
"(",
"\"reset called outside a transaction.\"",
")",
"self",
".",
"_transactions",
"[",
"-",
"1",
"]",
"=",
"[",
"]"
] | Reset transaction back to original state, discarding all
uncompleted transactions. | [
"Reset",
"transaction",
"back",
"to",
"original",
"state",
"discarding",
"all",
"uncompleted",
"transactions",
"."
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L68-L76 |
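The stack discipline in miniature: a transaction frame is a list of rollback actions, and `reset()` empties the innermost frame.

```python
transactions = []                      # stand-in for self._transactions
if len(transactions) == 0:
    print('reset called outside a transaction.')   # would raise RuntimeError
transactions.append(['undo-1'])        # enter a transaction, one rollback queued
transactions[-1] = []                  # what reset() does
print(transactions)                    # [[]]
```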
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper._cache_get_for_dn | def _cache_get_for_dn(self, dn: str) -> Dict[str, bytes]:
"""
Object state is cached. When an update is required the update will be
simulated on this cache, so that rollback information can be correct.
This function retrieves the cached data.
"""
# no cached item, retrieve from ldap
self._do_with_retry(
lambda obj: obj.search(
dn,
'(objectclass=*)',
ldap3.BASE,
attributes=['*', '+']))
results = self._obj.response
if len(results) < 1:
raise NoSuchObject("No results finding current value")
if len(results) > 1:
raise RuntimeError("Too many results finding current value")
return results[0]['raw_attributes'] | python | def _cache_get_for_dn(self, dn: str) -> Dict[str, bytes]:
"""
Object state is cached. When an update is required the update will be
simulated on this cache, so that rollback information can be correct.
This function retrieves the cached data.
"""
# no cached item, retrieve from ldap
self._do_with_retry(
lambda obj: obj.search(
dn,
'(objectclass=*)',
ldap3.BASE,
attributes=['*', '+']))
results = self._obj.response
if len(results) < 1:
raise NoSuchObject("No results finding current value")
if len(results) > 1:
raise RuntimeError("Too many results finding current value")
return results[0]['raw_attributes'] | [
"def",
"_cache_get_for_dn",
"(",
"self",
",",
"dn",
":",
"str",
")",
"->",
"Dict",
"[",
"str",
",",
"bytes",
"]",
":",
"# no cached item, retrieve from ldap",
"self",
".",
"_do_with_retry",
"(",
"lambda",
"obj",
":",
"obj",
".",
"search",
"(",
"dn",
",",
"'(objectclass=*)'",
",",
"ldap3",
".",
"BASE",
",",
"attributes",
"=",
"[",
"'*'",
",",
"'+'",
"]",
")",
")",
"results",
"=",
"self",
".",
"_obj",
".",
"response",
"if",
"len",
"(",
"results",
")",
"<",
"1",
":",
"raise",
"NoSuchObject",
"(",
"\"No results finding current value\"",
")",
"if",
"len",
"(",
"results",
")",
">",
"1",
":",
"raise",
"RuntimeError",
"(",
"\"Too many results finding current value\"",
")",
"return",
"results",
"[",
"0",
"]",
"[",
"'raw_attributes'",
"]"
] | Object state is cached. When an update is required the update will be
simulated on this cache, so that rollback information can be correct.
This function retrieves the cached data. | [
"Object",
"state",
"is",
"cached",
".",
"When",
"an",
"update",
"is",
"required",
"the",
"update",
"will",
"be",
"simulated",
"on",
"this",
"cache",
"so",
"that",
"rollback",
"information",
"can",
"be",
"correct",
".",
"This",
"function",
"retrieves",
"the",
"cached",
"data",
"."
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L78-L98 |
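The equivalent base-object read with `ldap3` directly; the server URL, bind style, and DN are placeholders.

```python
import ldap3

server = ldap3.Server('ldap://ldap.example.com')      # placeholder host
conn = ldap3.Connection(server, auto_bind=True)       # anonymous bind assumed

conn.search('uid=alice,ou=people,dc=example,dc=com',  # placeholder DN
            '(objectclass=*)', ldap3.BASE, attributes=['*', '+'])
if len(conn.response) == 1:
    print(conn.response[0]['raw_attributes'])         # bytes-valued attributes
```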
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper.is_dirty | def is_dirty(self) -> bool:
""" Are there uncommitted changes? """
if len(self._transactions) == 0:
raise RuntimeError("is_dirty called outside a transaction.")
if len(self._transactions[-1]) > 0:
return True
return False | python | def is_dirty(self) -> bool:
""" Are there uncommitted changes? """
if len(self._transactions) == 0:
raise RuntimeError("is_dirty called outside a transaction.")
if len(self._transactions[-1]) > 0:
return True
return False | [
"def",
"is_dirty",
"(",
"self",
")",
"->",
"bool",
":",
"if",
"len",
"(",
"self",
".",
"_transactions",
")",
"==",
"0",
":",
"raise",
"RuntimeError",
"(",
"\"is_dirty called outside a transaction.\"",
")",
"if",
"len",
"(",
"self",
".",
"_transactions",
"[",
"-",
"1",
"]",
")",
">",
"0",
":",
"return",
"True",
"return",
"False"
] | Are there uncommitted changes? | [
"Are",
"there",
"uncommitted",
"changes?"
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L104-L110 |
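Dirty simply means rollback actions are queued in the innermost frame:

```python
transactions = [[]]                        # inside a clean transaction
print(len(transactions[-1]) > 0)           # False: nothing to roll back
transactions[-1].append(lambda obj: None)  # an update queued its undo action
print(len(transactions[-1]) > 0)           # True: now dirty
```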
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper.leave_transaction_management | def leave_transaction_management(self) -> None:
"""
End a transaction. Must not be dirty when doing so. ie. commit() or
rollback() must be called if changes made. If dirty, changes will be
discarded.
"""
if len(self._transactions) == 0:
raise RuntimeError("leave_transaction_management called outside transaction")
elif len(self._transactions[-1]) > 0:
raise RuntimeError("leave_transaction_management called with uncommited rollbacks")
else:
self._transactions.pop() | python | def leave_transaction_management(self) -> None:
"""
End a transaction. Must not be dirty when doing so. ie. commit() or
rollback() must be called if changes made. If dirty, changes will be
discarded.
"""
if len(self._transactions) == 0:
raise RuntimeError("leave_transaction_management called outside transaction")
elif len(self._transactions[-1]) > 0:
raise RuntimeError("leave_transaction_management called with uncommited rollbacks")
else:
self._transactions.pop() | [
"def",
"leave_transaction_management",
"(",
"self",
")",
"->",
"None",
":",
"if",
"len",
"(",
"self",
".",
"_transactions",
")",
"==",
"0",
":",
"raise",
"RuntimeError",
"(",
"\"leave_transaction_management called outside transaction\"",
")",
"elif",
"len",
"(",
"self",
".",
"_transactions",
"[",
"-",
"1",
"]",
")",
">",
"0",
":",
"raise",
"RuntimeError",
"(",
"\"leave_transaction_management called with uncommited rollbacks\"",
")",
"else",
":",
"self",
".",
"_transactions",
".",
"pop",
"(",
")"
] | End a transaction. Must not be dirty when doing so. ie. commit() or
rollback() must be called if changes made. If dirty, changes will be
discarded. | [
"End",
"a",
"transaction",
".",
"Must",
"not",
"be",
"dirty",
"when",
"doing",
"so",
".",
"ie",
".",
"commit",
"()",
"or",
"rollback",
"()",
"must",
"be",
"called",
"if",
"changes",
"made",
".",
"If",
"dirty",
"changes",
"will",
"be",
"discarded",
"."
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L120-L131 |
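The exit rule on plain data: only a clean frame may be popped.

```python
transactions = [[]]
if len(transactions[-1]) > 0:
    raise RuntimeError('cannot leave with uncommitted rollbacks')
transactions.pop()
print(transactions)   # []: back outside transaction management
```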
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper.commit | def commit(self) -> None:
"""
Attempt to commit all changes to LDAP database. i.e. forget all
rollbacks. However stay inside transaction management.
"""
if len(self._transactions) == 0:
raise RuntimeError("commit called outside transaction")
# If we have nested transactions, we don't actually commit, but push
# rollbacks up to previous transaction.
if len(self._transactions) > 1:
for on_rollback in reversed(self._transactions[-1]):
self._transactions[-2].insert(0, on_rollback)
_debug("commit")
self.reset() | python | def commit(self) -> None:
"""
Attempt to commit all changes to LDAP database. i.e. forget all
rollbacks. However stay inside transaction management.
"""
if len(self._transactions) == 0:
raise RuntimeError("commit called outside transaction")
# If we have nested transactions, we don't actually commit, but push
# rollbacks up to previous transaction.
if len(self._transactions) > 1:
for on_rollback in reversed(self._transactions[-1]):
self._transactions[-2].insert(0, on_rollback)
_debug("commit")
self.reset() | [
"def",
"commit",
"(",
"self",
")",
"->",
"None",
":",
"if",
"len",
"(",
"self",
".",
"_transactions",
")",
"==",
"0",
":",
"raise",
"RuntimeError",
"(",
"\"commit called outside transaction\"",
")",
"# If we have nested transactions, we don't actually commit, but push",
"# rollbacks up to previous transaction.",
"if",
"len",
"(",
"self",
".",
"_transactions",
")",
">",
"1",
":",
"for",
"on_rollback",
"in",
"reversed",
"(",
"self",
".",
"_transactions",
"[",
"-",
"1",
"]",
")",
":",
"self",
".",
"_transactions",
"[",
"-",
"2",
"]",
".",
"insert",
"(",
"0",
",",
"on_rollback",
")",
"_debug",
"(",
"\"commit\"",
")",
"self",
".",
"reset",
"(",
")"
] | Attempt to commit all changes to LDAP database. i.e. forget all
rollbacks. However stay inside transaction management. | [
"Attempt",
"to",
"commit",
"all",
"changes",
"to",
"LDAP",
"database",
".",
"i",
".",
"e",
".",
"forget",
"all",
"rollbacks",
".",
"However",
"stay",
"inside",
"transaction",
"management",
"."
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L133-L148 |
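
A nested commit forgets nothing: the inner frame's rollback callbacks are prepended,
in order, to the enclosing frame so an outer rollback can still undo the inner work.
The bookkeeping in miniature (plain lists, no LDAP; reset() is approximated by
clearing the inner frame):

transactions = [["undo_outer"], ["undo_inner_2", "undo_inner_1"]]  # newest undo first
for on_rollback in reversed(transactions[-1]):   # inner commit pushes its undos up
    transactions[-2].insert(0, on_rollback)
transactions[-1] = []                            # stand-in for reset()
assert transactions[-2] == ["undo_inner_2", "undo_inner_1", "undo_outer"]
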
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper.rollback | def rollback(self) -> None:
"""
Roll back to previous database state. However, stay inside transaction
management.
"""
if len(self._transactions) == 0:
raise RuntimeError("rollback called outside transaction")
_debug("rollback:", self._transactions[-1])
# if something goes wrong here, nothing we can do about it, leave
# database as is.
try:
# for every rollback action ...
for on_rollback in self._transactions[-1]:
# execute it
_debug("--> rolling back", on_rollback)
self._do_with_retry(on_rollback)
except: # noqa: E722
_debug("--> rollback failed")
exc_class, exc, tb = sys.exc_info()
raise tldap.exceptions.RollbackError(
"FATAL Unrecoverable rollback error: %r" % exc)
finally:
# reset everything to clean state
_debug("--> rollback success")
self.reset() | python | def rollback(self) -> None:
"""
Roll back to previous database state. However, stay inside transaction
management.
"""
if len(self._transactions) == 0:
raise RuntimeError("rollback called outside transaction")
_debug("rollback:", self._transactions[-1])
# if something goes wrong here, nothing we can do about it, leave
# database as is.
try:
# for every rollback action ...
for on_rollback in self._transactions[-1]:
# execute it
_debug("--> rolling back", on_rollback)
self._do_with_retry(on_rollback)
except: # noqa: E722
_debug("--> rollback failed")
exc_class, exc, tb = sys.exc_info()
raise tldap.exceptions.RollbackError(
"FATAL Unrecoverable rollback error: %r" % exc)
finally:
# reset everything to clean state
_debug("--> rollback success")
self.reset() | [
"def",
"rollback",
"(",
"self",
")",
"->",
"None",
":",
"if",
"len",
"(",
"self",
".",
"_transactions",
")",
"==",
"0",
":",
"raise",
"RuntimeError",
"(",
"\"rollback called outside transaction\"",
")",
"_debug",
"(",
"\"rollback:\"",
",",
"self",
".",
"_transactions",
"[",
"-",
"1",
"]",
")",
"# if something goes wrong here, nothing we can do about it, leave",
"# database as is.",
"try",
":",
"# for every rollback action ...",
"for",
"on_rollback",
"in",
"self",
".",
"_transactions",
"[",
"-",
"1",
"]",
":",
"# execute it",
"_debug",
"(",
"\"--> rolling back\"",
",",
"on_rollback",
")",
"self",
".",
"_do_with_retry",
"(",
"on_rollback",
")",
"except",
":",
"# noqa: E722",
"_debug",
"(",
"\"--> rollback failed\"",
")",
"exc_class",
",",
"exc",
",",
"tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"raise",
"tldap",
".",
"exceptions",
".",
"RollbackError",
"(",
"\"FATAL Unrecoverable rollback error: %r\"",
"%",
"exc",
")",
"finally",
":",
"# reset everything to clean state",
"_debug",
"(",
"\"--> rollback success\"",
")",
"self",
".",
"reset",
"(",
")"
] | Roll back to previous database state. However, stay inside transaction
management. | [
"Roll",
"back",
"to",
"previous",
"database",
"state",
".",
"However",
"stay",
"inside",
"transaction",
"management",
"."
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L150-L175 |
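
Because _process() inserts each undo at index 0, iterating the frame front to back
replays the newest action's inverse first, so changes are undone in reverse order of
application; any failure during replay escalates to RollbackError because the
directory may be half-reverted. A toy illustration (labels stand in for callbacks):

applied = []
frame = []                               # one transaction frame, newest undo first

def make_undo(label):
    return lambda conn: applied.append(label)

frame.insert(0, make_undo("undo A"))     # action A happened first
frame.insert(0, make_undo("undo B"))     # action B happened second
for on_rollback in frame:                # rollback() walks front to back
    on_rollback(None)
assert applied == ["undo B", "undo A"]   # newest action undone first
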
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper._process | def _process(self, on_commit: UpdateCallable, on_rollback: UpdateCallable) -> Any:
"""
Process action. on_commit is a callback to execute action, on_rollback is
a callback to execute if the on_commit() has been called and a rollback
is required
"""
_debug("---> commiting", on_commit)
result = self._do_with_retry(on_commit)
if len(self._transactions) > 0:
# add statement to rollback log in case something goes wrong
self._transactions[-1].insert(0, on_rollback)
return result | python | def _process(self, on_commit: UpdateCallable, on_rollback: UpdateCallable) -> Any:
"""
Process action. on_commit is a callback to execute action, on_rollback is
a callback to execute if the on_commit() has been called and a rollback
is required
"""
_debug("---> commiting", on_commit)
result = self._do_with_retry(on_commit)
if len(self._transactions) > 0:
# add statement to rollback log in case something goes wrong
self._transactions[-1].insert(0, on_rollback)
return result | [
"def",
"_process",
"(",
"self",
",",
"on_commit",
":",
"UpdateCallable",
",",
"on_rollback",
":",
"UpdateCallable",
")",
"->",
"Any",
":",
"_debug",
"(",
"\"---> commiting\"",
",",
"on_commit",
")",
"result",
"=",
"self",
".",
"_do_with_retry",
"(",
"on_commit",
")",
"if",
"len",
"(",
"self",
".",
"_transactions",
")",
">",
"0",
":",
"# add statement to rollback log in case something goes wrong",
"self",
".",
"_transactions",
"[",
"-",
"1",
"]",
".",
"insert",
"(",
"0",
",",
"on_rollback",
")",
"return",
"result"
] | Process action. on_commit is a callback to execute action, on_rollback is
a callback to execute if the on_commit() has been called and a rollback
is required | [
"Process",
"action",
".",
"oncommit",
"is",
"a",
"callback",
"to",
"execute",
"action",
"onrollback",
"is",
"a",
"callback",
"to",
"execute",
"if",
"the",
"oncommit",
"()",
"has",
"been",
"called",
"and",
"a",
"rollback",
"is",
"required"
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L177-L191 |
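
Every mutating call below (add, modify, delete, rename) funnels through this helper:
apply the forward action now and, if a transaction is open, file its inverse at the
head of the current frame. Restated minimally (names are illustrative):

def process(transactions, do_with_retry, on_commit, on_rollback):
    result = do_with_retry(on_commit)            # apply the change immediately
    if transactions:                             # only record undos inside a transaction
        transactions[-1].insert(0, on_rollback)  # newest undo goes to the front
    return result
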
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper.add | def add(self, dn: str, mod_list: dict) -> None:
"""
Add a DN to the LDAP database; See ldap module. Doesn't return a result
if transactions enabled.
"""
_debug("add", self, dn, mod_list)
# if rollback of add required, delete it
def on_commit(obj):
obj.add(dn, None, mod_list)
def on_rollback(obj):
obj.delete(dn)
# process this action
return self._process(on_commit, on_rollback) | python | def add(self, dn: str, mod_list: dict) -> None:
"""
Add a DN to the LDAP database; See ldap module. Doesn't return a result
if transactions enabled.
"""
_debug("add", self, dn, mod_list)
# if rollback of add required, delete it
def on_commit(obj):
obj.add(dn, None, mod_list)
def on_rollback(obj):
obj.delete(dn)
# process this action
return self._process(on_commit, on_rollback) | [
"def",
"add",
"(",
"self",
",",
"dn",
":",
"str",
",",
"mod_list",
":",
"dict",
")",
"->",
"None",
":",
"_debug",
"(",
"\"add\"",
",",
"self",
",",
"dn",
",",
"mod_list",
")",
"# if rollback of add required, delete it",
"def",
"on_commit",
"(",
"obj",
")",
":",
"obj",
".",
"add",
"(",
"dn",
",",
"None",
",",
"mod_list",
")",
"def",
"on_rollback",
"(",
"obj",
")",
":",
"obj",
".",
"delete",
"(",
"dn",
")",
"# process this action",
"return",
"self",
".",
"_process",
"(",
"on_commit",
",",
"on_rollback",
")"
] | Add a DN to the LDAP database; See ldap module. Doesn't return a result
if transactions enabled. | [
"Add",
"a",
"DN",
"to",
"the",
"LDAP",
"database",
";",
"See",
"ldap",
"module",
".",
"Doesn",
"t",
"return",
"a",
"result",
"if",
"transactions",
"enabled",
"."
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L197-L213 |
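
The inverse of an add is simply a delete of the same DN, so no cached state is
needed. A hedged usage sketch (the DN and attributes are invented):

wrapper.add(
    "uid=alice,ou=people,dc=example,dc=org",
    {"objectClass": ["inetOrgPerson"], "uid": ["alice"], "sn": ["Example"]},
)
# If the surrounding transaction rolls back, the wrapper issues
# delete("uid=alice,ou=people,dc=example,dc=org") to undo the add.
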
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper.modify | def modify(self, dn: str, mod_list: dict) -> None:
"""
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
"""
_debug("modify", self, dn, mod_list)
# need to work out how to reverse changes in mod_list; result in revlist
revlist = {}
# get the current cached attributes
result = self._cache_get_for_dn(dn)
# find out how to reverse mod_list (for rollback) and put result in
# revlist. Also simulate actions on cache.
for mod_type, l in six.iteritems(mod_list):
for mod_op, mod_vals in l:
_debug("attribute:", mod_type)
if mod_type in result:
_debug("attribute cache:", result[mod_type])
else:
_debug("attribute cache is empty")
_debug("attribute modify:", (mod_op, mod_vals))
if mod_vals is not None:
if not isinstance(mod_vals, list):
mod_vals = [mod_vals]
if mod_op == ldap3.MODIFY_ADD:
# reverse of MODIFY_ADD is MODIFY_DELETE
reverse = (ldap3.MODIFY_DELETE, mod_vals)
elif mod_op == ldap3.MODIFY_DELETE and len(mod_vals) > 0:
# Reverse of MODIFY_DELETE is MODIFY_ADD, but only if value
# is given; if mod_vals is None, this means all values were
# deleted.
reverse = (ldap3.MODIFY_ADD, mod_vals)
elif mod_op == ldap3.MODIFY_DELETE \
or mod_op == ldap3.MODIFY_REPLACE:
if mod_type in result:
# If MODIFY_DELETE with no values or MODIFY_REPLACE
# then we have to replace all attributes with cached
# state
reverse = (
ldap3.MODIFY_REPLACE,
tldap.modlist.escape_list(result[mod_type])
)
else:
# except if we have no cached state for this DN, in
# which case we delete it.
reverse = (ldap3.MODIFY_DELETE, [])
else:
raise RuntimeError("mod_op of %d not supported" % mod_op)
reverse = [reverse]
_debug("attribute reverse:", reverse)
if mod_type in result:
_debug("attribute cache:", result[mod_type])
else:
_debug("attribute cache is empty")
revlist[mod_type] = reverse
_debug("--")
_debug("mod_list:", mod_list)
_debug("revlist:", revlist)
_debug("--")
# now the hard stuff is over, we get to the easy stuff
def on_commit(obj):
obj.modify(dn, mod_list)
def on_rollback(obj):
obj.modify(dn, revlist)
return self._process(on_commit, on_rollback) | python | def modify(self, dn: str, mod_list: dict) -> None:
"""
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
"""
_debug("modify", self, dn, mod_list)
# need to work out how to reverse changes in mod_list; result in revlist
revlist = {}
# get the current cached attributes
result = self._cache_get_for_dn(dn)
# find out how to reverse mod_list (for rollback) and put result in
# revlist. Also simulate actions on cache.
for mod_type, l in six.iteritems(mod_list):
for mod_op, mod_vals in l:
_debug("attribute:", mod_type)
if mod_type in result:
_debug("attribute cache:", result[mod_type])
else:
_debug("attribute cache is empty")
_debug("attribute modify:", (mod_op, mod_vals))
if mod_vals is not None:
if not isinstance(mod_vals, list):
mod_vals = [mod_vals]
if mod_op == ldap3.MODIFY_ADD:
# reverse of MODIFY_ADD is MODIFY_DELETE
reverse = (ldap3.MODIFY_DELETE, mod_vals)
elif mod_op == ldap3.MODIFY_DELETE and len(mod_vals) > 0:
# Reverse of MODIFY_DELETE is MODIFY_ADD, but only if value
# is given; if mod_vals is None, this means all values were
# deleted.
reverse = (ldap3.MODIFY_ADD, mod_vals)
elif mod_op == ldap3.MODIFY_DELETE \
or mod_op == ldap3.MODIFY_REPLACE:
if mod_type in result:
# If MODIFY_DELETE with no values or MODIFY_REPLACE
# then we have to replace all attributes with cached
# state
reverse = (
ldap3.MODIFY_REPLACE,
tldap.modlist.escape_list(result[mod_type])
)
else:
# except if we have no cached state for this DN, in
# which case we delete it.
reverse = (ldap3.MODIFY_DELETE, [])
else:
raise RuntimeError("mod_op of %d not supported" % mod_op)
reverse = [reverse]
_debug("attribute reverse:", reverse)
if mod_type in result:
_debug("attribute cache:", result[mod_type])
else:
_debug("attribute cache is empty")
revlist[mod_type] = reverse
_debug("--")
_debug("mod_list:", mod_list)
_debug("revlist:", revlist)
_debug("--")
# now the hard stuff is over, we get to the easy stuff
def on_commit(obj):
obj.modify(dn, mod_list)
def on_rollback(obj):
obj.modify(dn, revlist)
return self._process(on_commit, on_rollback) | [
"def",
"modify",
"(",
"self",
",",
"dn",
":",
"str",
",",
"mod_list",
":",
"dict",
")",
"->",
"None",
":",
"_debug",
"(",
"\"modify\"",
",",
"self",
",",
"dn",
",",
"mod_list",
")",
"# need to work out how to reverse changes in mod_list; result in revlist",
"revlist",
"=",
"{",
"}",
"# get the current cached attributes",
"result",
"=",
"self",
".",
"_cache_get_for_dn",
"(",
"dn",
")",
"# find the how to reverse mod_list (for rollback) and put result in",
"# revlist. Also simulate actions on cache.",
"for",
"mod_type",
",",
"l",
"in",
"six",
".",
"iteritems",
"(",
"mod_list",
")",
":",
"for",
"mod_op",
",",
"mod_vals",
"in",
"l",
":",
"_debug",
"(",
"\"attribute:\"",
",",
"mod_type",
")",
"if",
"mod_type",
"in",
"result",
":",
"_debug",
"(",
"\"attribute cache:\"",
",",
"result",
"[",
"mod_type",
"]",
")",
"else",
":",
"_debug",
"(",
"\"attribute cache is empty\"",
")",
"_debug",
"(",
"\"attribute modify:\"",
",",
"(",
"mod_op",
",",
"mod_vals",
")",
")",
"if",
"mod_vals",
"is",
"not",
"None",
":",
"if",
"not",
"isinstance",
"(",
"mod_vals",
",",
"list",
")",
":",
"mod_vals",
"=",
"[",
"mod_vals",
"]",
"if",
"mod_op",
"==",
"ldap3",
".",
"MODIFY_ADD",
":",
"# reverse of MODIFY_ADD is MODIFY_DELETE",
"reverse",
"=",
"(",
"ldap3",
".",
"MODIFY_DELETE",
",",
"mod_vals",
")",
"elif",
"mod_op",
"==",
"ldap3",
".",
"MODIFY_DELETE",
"and",
"len",
"(",
"mod_vals",
")",
">",
"0",
":",
"# Reverse of MODIFY_DELETE is MODIFY_ADD, but only if value",
"# is given if mod_vals is None, this means all values where",
"# deleted.",
"reverse",
"=",
"(",
"ldap3",
".",
"MODIFY_ADD",
",",
"mod_vals",
")",
"elif",
"mod_op",
"==",
"ldap3",
".",
"MODIFY_DELETE",
"or",
"mod_op",
"==",
"ldap3",
".",
"MODIFY_REPLACE",
":",
"if",
"mod_type",
"in",
"result",
":",
"# If MODIFY_DELETE with no values or MODIFY_REPLACE",
"# then we have to replace all attributes with cached",
"# state",
"reverse",
"=",
"(",
"ldap3",
".",
"MODIFY_REPLACE",
",",
"tldap",
".",
"modlist",
".",
"escape_list",
"(",
"result",
"[",
"mod_type",
"]",
")",
")",
"else",
":",
"# except if we have no cached state for this DN, in",
"# which case we delete it.",
"reverse",
"=",
"(",
"ldap3",
".",
"MODIFY_DELETE",
",",
"[",
"]",
")",
"else",
":",
"raise",
"RuntimeError",
"(",
"\"mod_op of %d not supported\"",
"%",
"mod_op",
")",
"reverse",
"=",
"[",
"reverse",
"]",
"_debug",
"(",
"\"attribute reverse:\"",
",",
"reverse",
")",
"if",
"mod_type",
"in",
"result",
":",
"_debug",
"(",
"\"attribute cache:\"",
",",
"result",
"[",
"mod_type",
"]",
")",
"else",
":",
"_debug",
"(",
"\"attribute cache is empty\"",
")",
"revlist",
"[",
"mod_type",
"]",
"=",
"reverse",
"_debug",
"(",
"\"--\"",
")",
"_debug",
"(",
"\"mod_list:\"",
",",
"mod_list",
")",
"_debug",
"(",
"\"revlist:\"",
",",
"revlist",
")",
"_debug",
"(",
"\"--\"",
")",
"# now the hard stuff is over, we get to the easy stuff",
"def",
"on_commit",
"(",
"obj",
")",
":",
"obj",
".",
"modify",
"(",
"dn",
",",
"mod_list",
")",
"def",
"on_rollback",
"(",
"obj",
")",
":",
"obj",
".",
"modify",
"(",
"dn",
",",
"revlist",
")",
"return",
"self",
".",
"_process",
"(",
"on_commit",
",",
"on_rollback",
")"
] | Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled. | [
"Modify",
"a",
"DN",
"in",
"the",
"LDAP",
"database",
";",
"See",
"ldap",
"module",
".",
"Doesn",
"t",
"return",
"a",
"result",
"if",
"transactions",
"enabled",
"."
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L215-L294 |
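
The bulk of modify() is deriving revlist: for each (operation, values) pair it picks
the ldap3 operation that undoes it, falling back to the cached attribute state where
a blanket delete or replace would destroy information. Summarised, with an invented
example:

import ldap3

# Inversion rules paraphrased from the code above:
#   MODIFY_ADD vals                -> MODIFY_DELETE vals
#   MODIFY_DELETE vals (non-empty) -> MODIFY_ADD vals
#   MODIFY_DELETE [] / MODIFY_REPLACE, attribute cached
#                                  -> MODIFY_REPLACE <escaped cached values>
#   MODIFY_DELETE [] / MODIFY_REPLACE, nothing cached
#                                  -> MODIFY_DELETE []
mod_list = {"mail": [(ldap3.MODIFY_ADD, ["alice@example.org"])]}
# The computed reverse entry would be:
#   revlist == {"mail": [(ldap3.MODIFY_DELETE, ["alice@example.org"])]}
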
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper.modify_no_rollback | def modify_no_rollback(self, dn: str, mod_list: dict):
"""
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
"""
_debug("modify_no_rollback", self, dn, mod_list)
result = self._do_with_retry(lambda obj: obj.modify_s(dn, mod_list))
_debug("--")
return result | python | def modify_no_rollback(self, dn: str, mod_list: dict):
"""
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
"""
_debug("modify_no_rollback", self, dn, mod_list)
result = self._do_with_retry(lambda obj: obj.modify_s(dn, mod_list))
_debug("--")
return result | [
"def",
"modify_no_rollback",
"(",
"self",
",",
"dn",
":",
"str",
",",
"mod_list",
":",
"dict",
")",
":",
"_debug",
"(",
"\"modify_no_rollback\"",
",",
"self",
",",
"dn",
",",
"mod_list",
")",
"result",
"=",
"self",
".",
"_do_with_retry",
"(",
"lambda",
"obj",
":",
"obj",
".",
"modify_s",
"(",
"dn",
",",
"mod_list",
")",
")",
"_debug",
"(",
"\"--\"",
")",
"return",
"result"
] | Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled. | [
"Modify",
"a",
"DN",
"in",
"the",
"LDAP",
"database",
";",
"See",
"ldap",
"module",
".",
"Doesn",
"t",
"return",
"a",
"result",
"if",
"transactions",
"enabled",
"."
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L296-L306 |
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper.delete | def delete(self, dn: str) -> None:
"""
delete a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
"""
_debug("delete", self)
# get copy of cache
result = self._cache_get_for_dn(dn)
# remove special values that can't be added
def delete_attribute(name):
if name in result:
del result[name]
delete_attribute('entryUUID')
delete_attribute('structuralObjectClass')
delete_attribute('modifiersName')
delete_attribute('subschemaSubentry')
delete_attribute('entryDN')
delete_attribute('modifyTimestamp')
delete_attribute('entryCSN')
delete_attribute('createTimestamp')
delete_attribute('creatorsName')
delete_attribute('hasSubordinates')
delete_attribute('pwdFailureTime')
delete_attribute('pwdChangedTime')
# turn into mod_list list.
mod_list = tldap.modlist.addModlist(result)
_debug("revlist:", mod_list)
# on commit carry out action; on rollback restore cached state
def on_commit(obj):
obj.delete(dn)
def on_rollback(obj):
obj.add(dn, None, mod_list)
return self._process(on_commit, on_rollback) | python | def delete(self, dn: str) -> None:
"""
delete a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
"""
_debug("delete", self)
# get copy of cache
result = self._cache_get_for_dn(dn)
# remove special values that can't be added
def delete_attribute(name):
if name in result:
del result[name]
delete_attribute('entryUUID')
delete_attribute('structuralObjectClass')
delete_attribute('modifiersName')
delete_attribute('subschemaSubentry')
delete_attribute('entryDN')
delete_attribute('modifyTimestamp')
delete_attribute('entryCSN')
delete_attribute('createTimestamp')
delete_attribute('creatorsName')
delete_attribute('hasSubordinates')
delete_attribute('pwdFailureTime')
delete_attribute('pwdChangedTime')
# turn into mod_list list.
mod_list = tldap.modlist.addModlist(result)
_debug("revlist:", mod_list)
# on commit carry out action; on rollback restore cached state
def on_commit(obj):
obj.delete(dn)
def on_rollback(obj):
obj.add(dn, None, mod_list)
return self._process(on_commit, on_rollback) | [
"def",
"delete",
"(",
"self",
",",
"dn",
":",
"str",
")",
"->",
"None",
":",
"_debug",
"(",
"\"delete\"",
",",
"self",
")",
"# get copy of cache",
"result",
"=",
"self",
".",
"_cache_get_for_dn",
"(",
"dn",
")",
"# remove special values that can't be added",
"def",
"delete_attribute",
"(",
"name",
")",
":",
"if",
"name",
"in",
"result",
":",
"del",
"result",
"[",
"name",
"]",
"delete_attribute",
"(",
"'entryUUID'",
")",
"delete_attribute",
"(",
"'structuralObjectClass'",
")",
"delete_attribute",
"(",
"'modifiersName'",
")",
"delete_attribute",
"(",
"'subschemaSubentry'",
")",
"delete_attribute",
"(",
"'entryDN'",
")",
"delete_attribute",
"(",
"'modifyTimestamp'",
")",
"delete_attribute",
"(",
"'entryCSN'",
")",
"delete_attribute",
"(",
"'createTimestamp'",
")",
"delete_attribute",
"(",
"'creatorsName'",
")",
"delete_attribute",
"(",
"'hasSubordinates'",
")",
"delete_attribute",
"(",
"'pwdFailureTime'",
")",
"delete_attribute",
"(",
"'pwdChangedTime'",
")",
"# turn into mod_list list.",
"mod_list",
"=",
"tldap",
".",
"modlist",
".",
"addModlist",
"(",
"result",
")",
"_debug",
"(",
"\"revlist:\"",
",",
"mod_list",
")",
"# on commit carry out action; on rollback restore cached state",
"def",
"on_commit",
"(",
"obj",
")",
":",
"obj",
".",
"delete",
"(",
"dn",
")",
"def",
"on_rollback",
"(",
"obj",
")",
":",
"obj",
".",
"add",
"(",
"dn",
",",
"None",
",",
"mod_list",
")",
"return",
"self",
".",
"_process",
"(",
"on_commit",
",",
"on_rollback",
")"
] | delete a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled. | [
"delete",
"a",
"dn",
"in",
"the",
"ldap",
"database",
";",
"see",
"ldap",
"module",
".",
"doesn",
"t",
"return",
"a",
"result",
"if",
"transactions",
"enabled",
"."
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L308-L347 |
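
Rollback of a delete is a re-add of the cached entry, which is why the
server-maintained operational attributes (entryUUID, timestamps, creatorsName and
friends) are stripped first: a server would reject an add that tries to set them.
Sketched with invented values:

cached = {
    "uid": ["alice"],
    "objectClass": ["inetOrgPerson"],
    "entryUUID": ["4f0f..."],                    # server-managed, cannot be re-added
    "modifyTimestamp": ["20240101000000Z"],      # likewise
}
for name in ("entryUUID", "modifyTimestamp"):    # the code strips a dozen such attrs
    cached.pop(name, None)
# tldap.modlist.addModlist(cached) then becomes the add() payload used on rollback.
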
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper.rename | def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:
"""
rename a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
"""
_debug("rename", self, dn, new_rdn, new_base_dn)
# split up the parameters
split_dn = tldap.dn.str2dn(dn)
split_newrdn = tldap.dn.str2dn(new_rdn)
assert(len(split_newrdn) == 1)
# make dn unqualified
rdn = tldap.dn.dn2str(split_dn[0:1])
# make newrdn fully qualified dn
tmplist = [split_newrdn[0]]
if new_base_dn is not None:
tmplist.extend(tldap.dn.str2dn(new_base_dn))
old_base_dn = tldap.dn.dn2str(split_dn[1:])
else:
tmplist.extend(split_dn[1:])
old_base_dn = None
newdn = tldap.dn.dn2str(tmplist)
_debug("--> commit ", self, dn, new_rdn, new_base_dn)
_debug("--> rollback", self, newdn, rdn, old_base_dn)
# on commit carry out action; on rollback reverse rename
def on_commit(obj):
obj.modify_dn(dn, new_rdn, new_superior=new_base_dn)
def on_rollback(obj):
obj.modify_dn(newdn, rdn, new_superior=old_base_dn)
return self._process(on_commit, on_rollback) | python | def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:
"""
rename a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
"""
_debug("rename", self, dn, new_rdn, new_base_dn)
# split up the parameters
split_dn = tldap.dn.str2dn(dn)
split_newrdn = tldap.dn.str2dn(new_rdn)
assert(len(split_newrdn) == 1)
# make dn unqualified
rdn = tldap.dn.dn2str(split_dn[0:1])
# make newrdn fully qualified dn
tmplist = [split_newrdn[0]]
if new_base_dn is not None:
tmplist.extend(tldap.dn.str2dn(new_base_dn))
old_base_dn = tldap.dn.dn2str(split_dn[1:])
else:
tmplist.extend(split_dn[1:])
old_base_dn = None
newdn = tldap.dn.dn2str(tmplist)
_debug("--> commit ", self, dn, new_rdn, new_base_dn)
_debug("--> rollback", self, newdn, rdn, old_base_dn)
# on commit carry out action; on rollback reverse rename
def on_commit(obj):
obj.modify_dn(dn, new_rdn, new_superior=new_base_dn)
def on_rollback(obj):
obj.modify_dn(newdn, rdn, new_superior=old_base_dn)
return self._process(on_commit, on_rollback) | [
"def",
"rename",
"(",
"self",
",",
"dn",
":",
"str",
",",
"new_rdn",
":",
"str",
",",
"new_base_dn",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
")",
"->",
"None",
":",
"_debug",
"(",
"\"rename\"",
",",
"self",
",",
"dn",
",",
"new_rdn",
",",
"new_base_dn",
")",
"# split up the parameters",
"split_dn",
"=",
"tldap",
".",
"dn",
".",
"str2dn",
"(",
"dn",
")",
"split_newrdn",
"=",
"tldap",
".",
"dn",
".",
"str2dn",
"(",
"new_rdn",
")",
"assert",
"(",
"len",
"(",
"split_newrdn",
")",
"==",
"1",
")",
"# make dn unqualified",
"rdn",
"=",
"tldap",
".",
"dn",
".",
"dn2str",
"(",
"split_dn",
"[",
"0",
":",
"1",
"]",
")",
"# make newrdn fully qualified dn",
"tmplist",
"=",
"[",
"split_newrdn",
"[",
"0",
"]",
"]",
"if",
"new_base_dn",
"is",
"not",
"None",
":",
"tmplist",
".",
"extend",
"(",
"tldap",
".",
"dn",
".",
"str2dn",
"(",
"new_base_dn",
")",
")",
"old_base_dn",
"=",
"tldap",
".",
"dn",
".",
"dn2str",
"(",
"split_dn",
"[",
"1",
":",
"]",
")",
"else",
":",
"tmplist",
".",
"extend",
"(",
"split_dn",
"[",
"1",
":",
"]",
")",
"old_base_dn",
"=",
"None",
"newdn",
"=",
"tldap",
".",
"dn",
".",
"dn2str",
"(",
"tmplist",
")",
"_debug",
"(",
"\"--> commit \"",
",",
"self",
",",
"dn",
",",
"new_rdn",
",",
"new_base_dn",
")",
"_debug",
"(",
"\"--> rollback\"",
",",
"self",
",",
"newdn",
",",
"rdn",
",",
"old_base_dn",
")",
"# on commit carry out action; on rollback reverse rename",
"def",
"on_commit",
"(",
"obj",
")",
":",
"obj",
".",
"modify_dn",
"(",
"dn",
",",
"new_rdn",
",",
"new_superior",
"=",
"new_base_dn",
")",
"def",
"on_rollback",
"(",
"obj",
")",
":",
"obj",
".",
"modify_dn",
"(",
"newdn",
",",
"rdn",
",",
"new_superior",
"=",
"old_base_dn",
")",
"return",
"self",
".",
"_process",
"(",
"on_commit",
",",
"on_rollback",
")"
] | rename a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled. | [
"rename",
"a",
"dn",
"in",
"the",
"ldap",
"database",
";",
"see",
"ldap",
"module",
".",
"doesn",
"t",
"return",
"a",
"result",
"if",
"transactions",
"enabled",
"."
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L349-L385 |
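
The DN arithmetic precomputes the exact inverse call: the old RDN plus, when the
entry moved, the old parent DN. Worked through on invented values, assuming the
usual str2dn/dn2str semantics of tldap.dn:

dn          = "uid=alice,ou=people,dc=example,dc=org"
new_rdn     = "uid=alicia"
new_base_dn = "ou=staff,dc=example,dc=org"
# rdn         -> "uid=alice"                                 (old leaf, kept for rollback)
# newdn       -> "uid=alicia,ou=staff,dc=example,dc=org"     (fully qualified new DN)
# old_base_dn -> "ou=people,dc=example,dc=org"
# commit:   modify_dn(dn,    new_rdn, new_superior=new_base_dn)
# rollback: modify_dn(newdn, rdn,     new_superior=old_base_dn)
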
Karaage-Cluster/python-tldap | tldap/backend/fake_transactions.py | LDAPwrapper.fail | def fail(self) -> None:
""" for testing purposes only. always fail in commit """
_debug("fail")
# on commit carry out action; on rollback reverse rename
def on_commit(_obj):
raise_testfailure("commit")
def on_rollback(_obj):
raise_testfailure("rollback")
return self._process(on_commit, on_rollback) | python | def fail(self) -> None:
""" for testing purposes only. always fail in commit """
_debug("fail")
# on commit carry out action; on rollback reverse rename
def on_commit(_obj):
raise_testfailure("commit")
def on_rollback(_obj):
raise_testfailure("rollback")
return self._process(on_commit, on_rollback) | [
"def",
"fail",
"(",
"self",
")",
"->",
"None",
":",
"_debug",
"(",
"\"fail\"",
")",
"# on commit carry out action; on rollback reverse rename",
"def",
"on_commit",
"(",
"_obj",
")",
":",
"raise_testfailure",
"(",
"\"commit\"",
")",
"def",
"on_rollback",
"(",
"_obj",
")",
":",
"raise_testfailure",
"(",
"\"rollback\"",
")",
"return",
"self",
".",
"_process",
"(",
"on_commit",
",",
"on_rollback",
")"
] | for testing purposes only. always fail in commit | [
"for",
"testing",
"purposes",
"only",
".",
"always",
"fail",
"in",
"commit"
] | train | https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/backend/fake_transactions.py#L387-L399 |
aroberge/experimental | experimental/transformers/int_seq.py | __experimental_range | def __experimental_range(start, stop, var, cond, loc={}):
'''Utility function made to reproduce range() with unit integer step
but with the added possibility of specifying a condition
on the looping variable (e.g. var % 2 == 0)
'''
locals().update(loc)
if start < stop:
for __ in range(start, stop):
locals()[var] = __
if eval(cond, globals(), locals()):
yield __
else:
for __ in range(start, stop, -1):
locals()[var] = __
if eval(cond, globals(), locals()):
yield __ | python | def __experimental_range(start, stop, var, cond, loc={}):
'''Utility function made to reproduce range() with unit integer step
but with the added possibility of specifying a condition
on the looping variable (e.g. var % 2 == 0)
'''
locals().update(loc)
if start < stop:
for __ in range(start, stop):
locals()[var] = __
if eval(cond, globals(), locals()):
yield __
else:
for __ in range(start, stop, -1):
locals()[var] = __
if eval(cond, globals(), locals()):
yield __ | [
"def",
"__experimental_range",
"(",
"start",
",",
"stop",
",",
"var",
",",
"cond",
",",
"loc",
"=",
"{",
"}",
")",
":",
"locals",
"(",
")",
".",
"update",
"(",
"loc",
")",
"if",
"start",
"<",
"stop",
":",
"for",
"__",
"in",
"range",
"(",
"start",
",",
"stop",
")",
":",
"locals",
"(",
")",
"[",
"var",
"]",
"=",
"__",
"if",
"eval",
"(",
"cond",
",",
"globals",
"(",
")",
",",
"locals",
"(",
")",
")",
":",
"yield",
"__",
"else",
":",
"for",
"__",
"in",
"range",
"(",
"start",
",",
"stop",
",",
"-",
"1",
")",
":",
"locals",
"(",
")",
"[",
"var",
"]",
"=",
"__",
"if",
"eval",
"(",
"cond",
",",
"globals",
"(",
")",
",",
"locals",
"(",
")",
")",
":",
"yield",
"__"
] | Utility function made to reproduce range() with unit integer step
but with the added possibility of specifying a condition
on the looping variable (e.g. var % 2 == 0) | [
"Utility",
"function",
"made",
"to",
"reproduce",
"range",
"()",
"with",
"unit",
"integer",
"step",
"but",
"with",
"the",
"added",
"possibility",
"of",
"specifying",
"a",
"condition",
"on",
"the",
"looping",
"variable",
"(",
"e",
".",
"g",
".",
"var",
"%",
"2",
"==",
"0",
")"
] | train | https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/transformers/int_seq.py#L51-L66 |
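
Called directly, the generator makes the condition filtering and the implicit
descending step visible. Note the trick of writing into locals() and re-reading it
via eval() is CPython-specific behaviour (pre PEP 667, i.e. Python <= 3.12), so
treat this as illustrative only:

evens = list(__experimental_range(0, 10, "i", "i % 2 == 0"))
assert evens == [0, 2, 4, 6, 8]

down = list(__experimental_range(5, 0, "i", "True"))   # start > stop counts down
assert down == [5, 4, 3, 2, 1]
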
aroberge/experimental | experimental/transformers/int_seq.py | create_for | def create_for(line, search_result):
'''Create a new "for loop" line as a replacement for the original code.
'''
try:
return line.format(search_result.group("indented_for"),
search_result.group("var"),
search_result.group("start"),
search_result.group("stop"),
search_result.group("cond"))
except IndexError:
return line.format(search_result.group("indented_for"),
search_result.group("var"),
search_result.group("start"),
search_result.group("stop")) | python | def create_for(line, search_result):
'''Create a new "for loop" line as a replacement for the original code.
'''
try:
return line.format(search_result.group("indented_for"),
search_result.group("var"),
search_result.group("start"),
search_result.group("stop"),
search_result.group("cond"))
except IndexError:
return line.format(search_result.group("indented_for"),
search_result.group("var"),
search_result.group("start"),
search_result.group("stop")) | [
"def",
"create_for",
"(",
"line",
",",
"search_result",
")",
":",
"try",
":",
"return",
"line",
".",
"format",
"(",
"search_result",
".",
"group",
"(",
"\"indented_for\"",
")",
",",
"search_result",
".",
"group",
"(",
"\"var\"",
")",
",",
"search_result",
".",
"group",
"(",
"\"start\"",
")",
",",
"search_result",
".",
"group",
"(",
"\"stop\"",
")",
",",
"search_result",
".",
"group",
"(",
"\"cond\"",
")",
")",
"except",
"IndexError",
":",
"return",
"line",
".",
"format",
"(",
"search_result",
".",
"group",
"(",
"\"indented_for\"",
")",
",",
"search_result",
".",
"group",
"(",
"\"var\"",
")",
",",
"search_result",
".",
"group",
"(",
"\"start\"",
")",
",",
"search_result",
".",
"group",
"(",
"\"stop\"",
")",
")"
] | Create a new "for loop" line as a replacement for the original code. | [
"Create",
"a",
"new",
"for",
"loop",
"line",
"as",
"a",
"replacement",
"for",
"the",
"original",
"code",
"."
] | train | https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/transformers/int_seq.py#L180-L193 |
davidblaisonneau-orange/foreman | foreman/itemSmartClassParameter.py | ItemSmartClassParameter.setOverrideValue | def setOverrideValue(self, attributes, hostName):
""" Function __setitem__
Set a parameter of a foreman object as a dict
@param key: The key to modify
@param attribute: The data
@return RETURN: The API result
"""
self['override'] = True
attrType = type(attributes)
if attrType is dict:
self['parameter_type'] = 'hash'
elif attrType is list:
self['parameter_type'] = 'array'
else:
self['parameter_type'] = 'string'
orv = self.getOverrideValueForHost(hostName)
if orv:
orv['value'] = attributes
return True
else:
return self.api.create('{}/{}/{}'.format(self.objName,
self.key,
'override_values'),
{"override_value":
{"match": "fqdn={}".format(hostName),
"value": attributes}}) | python | def setOverrideValue(self, attributes, hostName):
""" Function __setitem__
Set a parameter of a foreman object as a dict
@param key: The key to modify
@param attribute: The data
@return RETURN: The API result
"""
self['override'] = True
attrType = type(attributes)
if attrType is dict:
self['parameter_type'] = 'hash'
elif attrType is list:
self['parameter_type'] = 'array'
else:
self['parameter_type'] = 'string'
orv = self.getOverrideValueForHost(hostName)
if orv:
orv['value'] = attributes
return True
else:
return self.api.create('{}/{}/{}'.format(self.objName,
self.key,
'override_values'),
{"override_value":
{"match": "fqdn={}".format(hostName),
"value": attributes}}) | [
"def",
"setOverrideValue",
"(",
"self",
",",
"attributes",
",",
"hostName",
")",
":",
"self",
"[",
"'override'",
"]",
"=",
"True",
"attrType",
"=",
"type",
"(",
"attributes",
")",
"if",
"attrType",
"is",
"dict",
":",
"self",
"[",
"'parameter_type'",
"]",
"=",
"'hash'",
"elif",
"attrType",
"is",
"list",
":",
"self",
"[",
"'parameter_type'",
"]",
"=",
"'array'",
"else",
":",
"self",
"[",
"'parameter_type'",
"]",
"=",
"'string'",
"orv",
"=",
"self",
".",
"getOverrideValueForHost",
"(",
"hostName",
")",
"if",
"orv",
":",
"orv",
"[",
"'value'",
"]",
"=",
"attributes",
"return",
"True",
"else",
":",
"return",
"self",
".",
"api",
".",
"create",
"(",
"'{}/{}/{}'",
".",
"format",
"(",
"self",
".",
"objName",
",",
"self",
".",
"key",
",",
"'override_values'",
")",
",",
"{",
"\"override_value\"",
":",
"{",
"\"match\"",
":",
"\"fqdn={}\"",
".",
"format",
"(",
"hostName",
")",
",",
"\"value\"",
":",
"attributes",
"}",
"}",
")"
] | Function setOverrideValue
Set a parameter of a foreman object as a dict
@param attributes: The data to set as the override value
@param hostName: The host (FQDN) to create the override for
@return RETURN: The API result | [
"Function",
"__setitem__",
"Set",
"a",
"parameter",
"of",
"a",
"foreman",
"object",
"as",
"a",
"dict"
] | train | https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/itemSmartClassParameter.py#L87-L113 |
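
The Foreman parameter_type is inferred from the Python type of the value, and the
override is keyed to the host with an fqdn matcher. A hedged usage sketch (the
parameter object and host name are invented):

param.setOverrideValue({"servers": ["0.pool.ntp.org"]}, "node1.example.org")
# -> parameter_type = 'hash'; creates an override_value matching
#    "fqdn=node1.example.org" unless one already exists, in which case only
#    its value is updated in place.
param.setOverrideValue(["a", "b"], "node1.example.org")    # -> 'array'
param.setOverrideValue("plain", "node1.example.org")       # -> 'string'
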
developersociety/django-glitter | glitter/reminders/models.py | Reminder.get_interval_timedelta | def get_interval_timedelta(self):
""" Spits out the timedelta in days. """
now_datetime = timezone.now()
current_month_days = monthrange(now_datetime.year, now_datetime.month)[1]
# Two weeks
if self.interval == reminders_choices.INTERVAL_2_WEEKS:
interval_timedelta = datetime.timedelta(days=14)
# One month
elif self.interval == reminders_choices.INTERVAL_ONE_MONTH:
interval_timedelta = datetime.timedelta(days=current_month_days)
# Three months
elif self.interval == reminders_choices.INTERVAL_THREE_MONTHS:
three_months = now_datetime + relativedelta(months=+3)
interval_timedelta = three_months - now_datetime
# Six months
elif self.interval == reminders_choices.INTERVAL_SIX_MONTHS:
six_months = now_datetime + relativedelta(months=+6)
interval_timedelta = six_months - now_datetime
# One year
elif self.interval == reminders_choices.INTERVAL_ONE_YEAR:
one_year = now_datetime + relativedelta(years=+1)
interval_timedelta = one_year - now_datetime
return interval_timedelta | python | def get_interval_timedelta(self):
""" Spits out the timedelta in days. """
now_datetime = timezone.now()
current_month_days = monthrange(now_datetime.year, now_datetime.month)[1]
# Two weeks
if self.interval == reminders_choices.INTERVAL_2_WEEKS:
interval_timedelta = datetime.timedelta(days=14)
# One month
elif self.interval == reminders_choices.INTERVAL_ONE_MONTH:
interval_timedelta = datetime.timedelta(days=current_month_days)
# Three months
elif self.interval == reminders_choices.INTERVAL_THREE_MONTHS:
three_months = now_datetime + relativedelta(months=+3)
interval_timedelta = three_months - now_datetime
# Six months
elif self.interval == reminders_choices.INTERVAL_SIX_MONTHS:
six_months = now_datetime + relativedelta(months=+6)
interval_timedelta = six_months - now_datetime
# One year
elif self.interval == reminders_choices.INTERVAL_ONE_YEAR:
one_year = now_datetime + relativedelta(years=+1)
interval_timedelta = one_year - now_datetime
return interval_timedelta | [
"def",
"get_interval_timedelta",
"(",
"self",
")",
":",
"now_datetime",
"=",
"timezone",
".",
"now",
"(",
")",
"current_month_days",
"=",
"monthrange",
"(",
"now_datetime",
".",
"year",
",",
"now_datetime",
".",
"month",
")",
"[",
"1",
"]",
"# Two weeks",
"if",
"self",
".",
"interval",
"==",
"reminders_choices",
".",
"INTERVAL_2_WEEKS",
":",
"interval_timedelta",
"=",
"datetime",
".",
"timedelta",
"(",
"days",
"=",
"14",
")",
"# One month",
"elif",
"self",
".",
"interval",
"==",
"reminders_choices",
".",
"INTERVAL_ONE_MONTH",
":",
"interval_timedelta",
"=",
"datetime",
".",
"timedelta",
"(",
"days",
"=",
"current_month_days",
")",
"# Three months",
"elif",
"self",
".",
"interval",
"==",
"reminders_choices",
".",
"INTERVAL_THREE_MONTHS",
":",
"three_months",
"=",
"now_datetime",
"+",
"relativedelta",
"(",
"months",
"=",
"+",
"3",
")",
"interval_timedelta",
"=",
"three_months",
"-",
"now_datetime",
"# Six months",
"elif",
"self",
".",
"interval",
"==",
"reminders_choices",
".",
"INTERVAL_SIX_MONTHS",
":",
"six_months",
"=",
"now_datetime",
"+",
"relativedelta",
"(",
"months",
"=",
"+",
"6",
")",
"interval_timedelta",
"=",
"six_months",
"-",
"now_datetime",
"# One year",
"elif",
"self",
".",
"interval",
"==",
"reminders_choices",
".",
"INTERVAL_ONE_YEAR",
":",
"one_year",
"=",
"now_datetime",
"+",
"relativedelta",
"(",
"years",
"=",
"+",
"1",
")",
"interval_timedelta",
"=",
"one_year",
"-",
"now_datetime",
"return",
"interval_timedelta"
] | Spits out the timedelta in days. | [
"Spits",
"out",
"the",
"timedelta",
"in",
"days",
"."
] | train | https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/reminders/models.py#L34-L63 |
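
The two-week case is a fixed timedelta; the month and year cases go through
relativedelta so month lengths and leap years are respected, then subtract back to
a plain timedelta. The calendar-aware branch in isolation (dates invented):

import datetime
from dateutil.relativedelta import relativedelta

now = datetime.datetime(2024, 1, 31)
interval = (now + relativedelta(months=+3)) - now
# relativedelta clamps 31 Jan + 3 months to 30 Apr, so:
assert interval == datetime.timedelta(days=90)
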
MatterMiners/cobald | cobald/daemon/runners/asyncio_watcher.py | awaitable_runner | async def awaitable_runner(runner: BaseRunner):
"""Execute a runner without blocking the event loop"""
runner_thread = CapturingThread(target=runner.run)
runner_thread.start()
delay = 0.0
while not runner_thread.join(timeout=0):
await asyncio.sleep(delay)
delay = min(delay + 0.1, 1.0) | python | async def awaitable_runner(runner: BaseRunner):
"""Execute a runner without blocking the event loop"""
runner_thread = CapturingThread(target=runner.run)
runner_thread.start()
delay = 0.0
while not runner_thread.join(timeout=0):
await asyncio.sleep(delay)
delay = min(delay + 0.1, 1.0) | [
"async",
"def",
"awaitable_runner",
"(",
"runner",
":",
"BaseRunner",
")",
":",
"runner_thread",
"=",
"CapturingThread",
"(",
"target",
"=",
"runner",
".",
"run",
")",
"runner_thread",
".",
"start",
"(",
")",
"delay",
"=",
"0.0",
"while",
"not",
"runner_thread",
".",
"join",
"(",
"timeout",
"=",
"0",
")",
":",
"await",
"asyncio",
".",
"sleep",
"(",
"delay",
")",
"delay",
"=",
"min",
"(",
"delay",
"+",
"0.1",
",",
"1.0",
")"
] | Execute a runner without blocking the event loop | [
"Execute",
"a",
"runner",
"without",
"blocking",
"the",
"event",
"loop"
] | train | https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/asyncio_watcher.py#L9-L16 |
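
join(timeout=0) keeps the check non-blocking and the awaited sleep yields control
back to the event loop; the ramp keeps the first rechecks snappy without
busy-polling a long-lived runner. The back-off sequence in isolation:

delay, samples = 0.0, []
for _ in range(13):
    samples.append(round(delay, 1))
    delay = min(delay + 0.1, 1.0)
assert samples == [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.0, 1.0]
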
MatterMiners/cobald | cobald/daemon/runners/asyncio_watcher.py | asyncio_main_run | def asyncio_main_run(root_runner: BaseRunner):
"""
Create an ``asyncio`` event loop running in the main thread and watching runners
Using ``asyncio`` to handle subprocesses requires a specific loop type to run in the main thread.
This function sets up and runs the correct loop in a portable way.
In addition, it runs a single :py:class:`~.BaseRunner` until completion or failure.
.. seealso:: The `issue #8 <https://github.com/MatterMiners/cobald/issues/8>`_ for details.
"""
assert threading.current_thread() == threading.main_thread(), 'only main thread can accept asyncio subprocesses'
if sys.platform == 'win32':
event_loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(event_loop)
else:
event_loop = asyncio.get_event_loop()
asyncio.get_child_watcher().attach_loop(event_loop)
event_loop.run_until_complete(awaitable_runner(root_runner)) | python | def asyncio_main_run(root_runner: BaseRunner):
"""
Create an ``asyncio`` event loop running in the main thread and watching runners
Using ``asyncio`` to handle subprocesses requires a specific loop type to run in the main thread.
This function sets up and runs the correct loop in a portable way.
In addition, it runs a single :py:class:`~.BaseRunner` until completion or failure.
.. seealso:: The `issue #8 <https://github.com/MatterMiners/cobald/issues/8>`_ for details.
"""
assert threading.current_thread() == threading.main_thread(), 'only main thread can accept asyncio subprocesses'
if sys.platform == 'win32':
event_loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(event_loop)
else:
event_loop = asyncio.get_event_loop()
asyncio.get_child_watcher().attach_loop(event_loop)
event_loop.run_until_complete(awaitable_runner(root_runner)) | [
"def",
"asyncio_main_run",
"(",
"root_runner",
":",
"BaseRunner",
")",
":",
"assert",
"threading",
".",
"current_thread",
"(",
")",
"==",
"threading",
".",
"main_thread",
"(",
")",
",",
"'only main thread can accept asyncio subprocesses'",
"if",
"sys",
".",
"platform",
"==",
"'win32'",
":",
"event_loop",
"=",
"asyncio",
".",
"ProactorEventLoop",
"(",
")",
"asyncio",
".",
"set_event_loop",
"(",
"event_loop",
")",
"else",
":",
"event_loop",
"=",
"asyncio",
".",
"get_event_loop",
"(",
")",
"asyncio",
".",
"get_child_watcher",
"(",
")",
".",
"attach_loop",
"(",
"event_loop",
")",
"event_loop",
".",
"run_until_complete",
"(",
"awaitable_runner",
"(",
"root_runner",
")",
")"
] | Create an ``asyncio`` event loop running in the main thread and watching runners
Using ``asyncio`` to handle subprocesses requires a specific loop type to run in the main thread.
This function sets up and runs the correct loop in a portable way.
In addition, it runs a single :py:class:`~.BaseRunner` until completion or failure.
.. seealso:: The `issue #8 <https://github.com/MatterMiners/cobald/issues/8>`_ for details. | [
"Create",
"an",
"asyncio",
"event",
"loop",
"running",
"in",
"the",
"main",
"thread",
"and",
"watching",
"runners"
] | train | https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/asyncio_watcher.py#L19-L36 |
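
The Windows branch exists because the default selector loop there cannot supervise
subprocesses, and on POSIX the child watcher must be attached to the loop that will
actually run; both requirements tie this function to the main thread. A hedged
entry-point sketch (the runner class name is invented):

if __name__ == "__main__":
    root = ServiceRunner()        # any BaseRunner implementation
    asyncio_main_run(root)        # must run in the main thread (see the assert)
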
davidblaisonneau-orange/foreman | foreman/itemConfigTemplate.py | ItemConfigTemplate.enhance | def enhance(self):
""" Function enhance
Enhance the object with new item or enhanced items
"""
self.update({'os_default_templates':
SubDict(self.api, self.objName,
self.payloadObj, self.key,
SubItemOsDefaultTemplate)})
self.update({'operatingsystems':
SubDict(self.api, self.objName,
self.payloadObj, self.key,
SubItemOperatingSystem)}) | python | def enhance(self):
""" Function enhance
Enhance the object with new item or enhanced items
"""
self.update({'os_default_templates':
SubDict(self.api, self.objName,
self.payloadObj, self.key,
SubItemOsDefaultTemplate)})
self.update({'operatingsystems':
SubDict(self.api, self.objName,
self.payloadObj, self.key,
SubItemOperatingSystem)}) | [
"def",
"enhance",
"(",
"self",
")",
":",
"self",
".",
"update",
"(",
"{",
"'os_default_templates'",
":",
"SubDict",
"(",
"self",
".",
"api",
",",
"self",
".",
"objName",
",",
"self",
".",
"payloadObj",
",",
"self",
".",
"key",
",",
"SubItemOsDefaultTemplate",
")",
"}",
")",
"self",
".",
"update",
"(",
"{",
"'operatingsystems'",
":",
"SubDict",
"(",
"self",
".",
"api",
",",
"self",
".",
"objName",
",",
"self",
".",
"payloadObj",
",",
"self",
".",
"key",
",",
"SubItemOperatingSystem",
")",
"}",
")"
] | Function enhance
Enhance the object with new item or enhanced items | [
"Function",
"enhance",
"Enhance",
"the",
"object",
"with",
"new",
"item",
"or",
"enhanced",
"items"
] | train | https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/itemConfigTemplate.py#L37-L48 |
qubell/contrib-python-qubell-client | qubell/api/tools/__init__.py | retry | def retry(tries=10, delay=1, backoff=2, retry_exception=None):
"""
Retry "tries" times, with initial "delay", increasing delay "delay*backoff" each time.
Without exception, success means the function returns a valid object.
With exception, success means no exceptions are raised.
"""
assert tries > 0, "tries must be 1 or greater"
catching_mode = bool(retry_exception)
def deco_retry(f):
@functools.wraps(f)
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
while mtries > 0:
time.sleep(mdelay)
mdelay *= backoff
try:
rv = f(*args, **kwargs)
if not catching_mode and rv:
return rv
except retry_exception:
pass
else:
if catching_mode:
return rv
mtries -= 1
if mtries == 0 and not catching_mode:
return False
if mtries == 0 and catching_mode:
return f(*args, **kwargs) # extra try, to avoid except-raise syntax
log.debug("{0} try, sleeping for {1} sec".format(tries-mtries, mdelay))
raise Exception("unreachable code")
return f_retry
return deco_retry | python | def retry(tries=10, delay=1, backoff=2, retry_exception=None):
"""
Retry "tries" times, with initial "delay", increasing delay "delay*backoff" each time.
Without exception, success means the function returns a valid object.
With exception, success means no exceptions are raised.
"""
assert tries > 0, "tries must be 1 or greater"
catching_mode = bool(retry_exception)
def deco_retry(f):
@functools.wraps(f)
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
while mtries > 0:
time.sleep(mdelay)
mdelay *= backoff
try:
rv = f(*args, **kwargs)
if not catching_mode and rv:
return rv
except retry_exception:
pass
else:
if catching_mode:
return rv
mtries -= 1
if mtries == 0 and not catching_mode:
return False
if mtries == 0 and catching_mode:
return f(*args, **kwargs) # extra try, to avoid except-raise syntax
log.debug("{0} try, sleeping for {1} sec".format(tries-mtries, mdelay))
raise Exception("unreachable code")
return f_retry
return deco_retry | [
"def",
"retry",
"(",
"tries",
"=",
"10",
",",
"delay",
"=",
"1",
",",
"backoff",
"=",
"2",
",",
"retry_exception",
"=",
"None",
")",
":",
"assert",
"tries",
">",
"0",
",",
"\"tries must be 1 or greater\"",
"catching_mode",
"=",
"bool",
"(",
"retry_exception",
")",
"def",
"deco_retry",
"(",
"f",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"f",
")",
"def",
"f_retry",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"mtries",
",",
"mdelay",
"=",
"tries",
",",
"delay",
"while",
"mtries",
">",
"0",
":",
"time",
".",
"sleep",
"(",
"mdelay",
")",
"mdelay",
"*=",
"backoff",
"try",
":",
"rv",
"=",
"f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"not",
"catching_mode",
"and",
"rv",
":",
"return",
"rv",
"except",
"retry_exception",
":",
"pass",
"else",
":",
"if",
"catching_mode",
":",
"return",
"rv",
"mtries",
"-=",
"1",
"if",
"mtries",
"is",
"0",
"and",
"not",
"catching_mode",
":",
"return",
"False",
"if",
"mtries",
"is",
"0",
"and",
"catching_mode",
":",
"return",
"f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"# extra try, to avoid except-raise syntax",
"log",
".",
"debug",
"(",
"\"{0} try, sleeping for {1} sec\"",
".",
"format",
"(",
"tries",
"-",
"mtries",
",",
"mdelay",
")",
")",
"raise",
"Exception",
"(",
"\"unreachable code\"",
")",
"return",
"f_retry",
"return",
"deco_retry"
] | Retry "tries" times, with initial "delay", increasing delay "delay*backoff" each time.
Without exception, success means the function returns a valid object.
With exception, success means no exceptions are raised. | [
"Retry",
"tries",
"times",
"with",
"initial",
"delay",
"increasing",
"delay",
"delay",
"*",
"backoff",
"each",
"time",
".",
"Without",
"exception",
"success",
"means",
"when",
"function",
"returns",
"valid",
"object",
".",
"With",
"exception",
"success",
"when",
"no",
"exceptions"
] | train | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/tools/__init__.py#L37-L71 |
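
Usage differs by mode: without retry_exception the decorated function is polled for
a truthy result (False after all tries); with it, the exception is swallowed until
the tries run out, and one extra call outside the try lets the last failure
propagate. Sketched with invented stand-ins:

@retry(tries=5, delay=0.1, backoff=2)
def wait_ready():
    return fetch_status() == "READY"   # fetch_status is an invented stand-in

@retry(tries=5, delay=0.1, backoff=2, retry_exception=ConnectionError)
def fetch():
    return do_request()                # do_request is an invented stand-in
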
qubell/contrib-python-qubell-client | qubell/api/tools/__init__.py | dump | def dump(node):
""" Dump initialized object structure to yaml
"""
from qubell.api.private.platform import Auth, QubellPlatform
from qubell.api.private.organization import Organization
from qubell.api.private.application import Application
from qubell.api.private.instance import Instance
from qubell.api.private.revision import Revision
from qubell.api.private.environment import Environment
from qubell.api.private.zone import Zone
from qubell.api.private.manifest import Manifest
# Exclude keys from dump
# Format: { 'ClassName': ['fields', 'to', 'exclude']}
exclusion_list = {
Auth: ['cookies'],
QubellPlatform:['auth', ],
Organization: ['auth', 'organizationId', 'zone'],
Application: ['auth', 'applicationId', 'organization'],
Instance: ['auth', 'instanceId', 'application'],
Manifest: ['name', 'content'],
Revision: ['auth', 'revisionId'],
Environment: ['auth', 'environmentId', 'organization'],
Zone: ['auth', 'zoneId', 'organization'],
}
def obj_presenter(dumper, obj):
for x in exclusion_list.keys():
if isinstance(obj, x): # Find class
fields = obj.__dict__.copy()
for excl_item in exclusion_list[x]:
try:
fields.pop(excl_item)
except:
log.warn('No item %s in object %s' % (excl_item, x))
return dumper.represent_mapping('tag:yaml.org,2002:map', fields)
return dumper.represent_mapping('tag:yaml.org,2002:map', obj.__dict__)
noalias_dumper = yaml.dumper.Dumper
noalias_dumper.ignore_aliases = lambda self, data: True
yaml.add_representer(unicode, lambda dumper, value: dumper.represent_scalar(u'tag:yaml.org,2002:str', value))
yaml.add_multi_representer(object, obj_presenter)
serialized = yaml.dump(node, default_flow_style=False, Dumper=noalias_dumper)
return serialized | python | def dump(node):
""" Dump initialized object structure to yaml
"""
from qubell.api.private.platform import Auth, QubellPlatform
from qubell.api.private.organization import Organization
from qubell.api.private.application import Application
from qubell.api.private.instance import Instance
from qubell.api.private.revision import Revision
from qubell.api.private.environment import Environment
from qubell.api.private.zone import Zone
from qubell.api.private.manifest import Manifest
# Exclude keys from dump
# Format: { 'ClassName': ['fields', 'to', 'exclude']}
exclusion_list = {
Auth: ['cookies'],
QubellPlatform:['auth', ],
Organization: ['auth', 'organizationId', 'zone'],
Application: ['auth', 'applicationId', 'organization'],
Instance: ['auth', 'instanceId', 'application'],
Manifest: ['name', 'content'],
Revision: ['auth', 'revisionId'],
Environment: ['auth', 'environmentId', 'organization'],
Zone: ['auth', 'zoneId', 'organization'],
}
def obj_presenter(dumper, obj):
for x in exclusion_list.keys():
if isinstance(obj, x): # Find class
fields = obj.__dict__.copy()
for excl_item in exclusion_list[x]:
try:
fields.pop(excl_item)
except:
log.warn('No item %s in object %s' % (excl_item, x))
return dumper.represent_mapping('tag:yaml.org,2002:map', fields)
return dumper.represent_mapping('tag:yaml.org,2002:map', obj.__dict__)
noalias_dumper = yaml.dumper.Dumper
noalias_dumper.ignore_aliases = lambda self, data: True
yaml.add_representer(unicode, lambda dumper, value: dumper.represent_scalar(u'tag:yaml.org,2002:str', value))
yaml.add_multi_representer(object, obj_presenter)
serialized = yaml.dump(node, default_flow_style=False, Dumper=noalias_dumper)
return serialized | [
"def",
"dump",
"(",
"node",
")",
":",
"from",
"qubell",
".",
"api",
".",
"private",
".",
"platform",
"import",
"Auth",
",",
"QubellPlatform",
"from",
"qubell",
".",
"api",
".",
"private",
".",
"organization",
"import",
"Organization",
"from",
"qubell",
".",
"api",
".",
"private",
".",
"application",
"import",
"Application",
"from",
"qubell",
".",
"api",
".",
"private",
".",
"instance",
"import",
"Instance",
"from",
"qubell",
".",
"api",
".",
"private",
".",
"revision",
"import",
"Revision",
"from",
"qubell",
".",
"api",
".",
"private",
".",
"environment",
"import",
"Environment",
"from",
"qubell",
".",
"api",
".",
"private",
".",
"zone",
"import",
"Zone",
"from",
"qubell",
".",
"api",
".",
"private",
".",
"manifest",
"import",
"Manifest",
"# Exclude keys from dump",
"# Format: { 'ClassName': ['fields', 'to', 'exclude']}",
"exclusion_list",
"=",
"{",
"Auth",
":",
"[",
"'cookies'",
"]",
",",
"QubellPlatform",
":",
"[",
"'auth'",
",",
"]",
",",
"Organization",
":",
"[",
"'auth'",
",",
"'organizationId'",
",",
"'zone'",
"]",
",",
"Application",
":",
"[",
"'auth'",
",",
"'applicationId'",
",",
"'organization'",
"]",
",",
"Instance",
":",
"[",
"'auth'",
",",
"'instanceId'",
",",
"'application'",
"]",
",",
"Manifest",
":",
"[",
"'name'",
",",
"'content'",
"]",
",",
"Revision",
":",
"[",
"'auth'",
",",
"'revisionId'",
"]",
",",
"Environment",
":",
"[",
"'auth'",
",",
"'environmentId'",
",",
"'organization'",
"]",
",",
"Zone",
":",
"[",
"'auth'",
",",
"'zoneId'",
",",
"'organization'",
"]",
",",
"}",
"def",
"obj_presenter",
"(",
"dumper",
",",
"obj",
")",
":",
"for",
"x",
"in",
"exclusion_list",
".",
"keys",
"(",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"x",
")",
":",
"# Find class",
"fields",
"=",
"obj",
".",
"__dict__",
".",
"copy",
"(",
")",
"for",
"excl_item",
"in",
"exclusion_list",
"[",
"x",
"]",
":",
"try",
":",
"fields",
".",
"pop",
"(",
"excl_item",
")",
"except",
":",
"log",
".",
"warn",
"(",
"'No item %s in object %s'",
"%",
"(",
"excl_item",
",",
"x",
")",
")",
"return",
"dumper",
".",
"represent_mapping",
"(",
"'tag:yaml.org,2002:map'",
",",
"fields",
")",
"return",
"dumper",
".",
"represent_mapping",
"(",
"'tag:yaml.org,2002:map'",
",",
"obj",
".",
"__dict__",
")",
"noalias_dumper",
"=",
"yaml",
".",
"dumper",
".",
"Dumper",
"noalias_dumper",
".",
"ignore_aliases",
"=",
"lambda",
"self",
",",
"data",
":",
"True",
"yaml",
".",
"add_representer",
"(",
"unicode",
",",
"lambda",
"dumper",
",",
"value",
":",
"dumper",
".",
"represent_scalar",
"(",
"u'tag:yaml.org,2002:str'",
",",
"value",
")",
")",
"yaml",
".",
"add_multi_representer",
"(",
"object",
",",
"obj_presenter",
")",
"serialized",
"=",
"yaml",
".",
"dump",
"(",
"node",
",",
"default_flow_style",
"=",
"False",
",",
"Dumper",
"=",
"noalias_dumper",
")",
"return",
"serialized"
] | Dump initialized object structure to yaml | [
"Dump",
"initialized",
"object",
"structure",
"to",
"yaml"
] | train | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/tools/__init__.py#L145-L191 |
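
The core trick is a PyYAML multi-representer registered for object: every instance
is serialised from a copy of its __dict__ with the per-class exclusion list applied,
so credentials and back-references never reach the YAML. The same technique in
isolation (class and field names invented):

import yaml

class Box(object):
    def __init__(self):
        self.name = "demo"
        self.secret = "hide-me"

exclusion_list = {Box: ["secret"]}

def obj_presenter(dumper, obj):
    fields = obj.__dict__.copy()
    for excl in exclusion_list.get(type(obj), []):
        fields.pop(excl, None)
    return dumper.represent_mapping("tag:yaml.org,2002:map", fields)

yaml.add_multi_representer(object, obj_presenter)
print(yaml.dump(Box(), default_flow_style=False))   # -> name: demo
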
qubell/contrib-python-qubell-client | qubell/api/tools/__init__.py | load_env | def load_env(file):
"""
Generate environment used for 'org.restore' method
:param file: env file
:return: env
"""
env = yaml.load(open(file))
for org in env.get('organizations', []):
if not org.get('applications'):
org['applications'] = []
if org.get('starter-kit'):
kit_meta = get_starter_kit_meta(org.get('starter-kit'))
for meta_app in get_applications_from_metadata(kit_meta):
org['applications'].append(meta_app)
if org.get('meta'):
for meta_app in get_applications_from_metadata(org.get('meta')):
org['applications'].append(meta_app)
for app in org.get('applications', []):
if app.get('file'):
app['file'] = os.path.realpath(os.path.join(os.path.dirname(file), app['file']))
return env | python | def load_env(file):
"""
Generate environment used for 'org.restore' method
:param file: env file
:return: env
"""
env = yaml.load(open(file))
for org in env.get('organizations', []):
if not org.get('applications'):
org['applications'] = []
if org.get('starter-kit'):
kit_meta = get_starter_kit_meta(org.get('starter-kit'))
for meta_app in get_applications_from_metadata(kit_meta):
org['applications'].append(meta_app)
if org.get('meta'):
for meta_app in get_applications_from_metadata(org.get('meta')):
org['applications'].append(meta_app)
for app in org.get('applications', []):
if app.get('file'):
app['file'] = os.path.realpath(os.path.join(os.path.dirname(file), app['file']))
return env | [
"def",
"load_env",
"(",
"file",
")",
":",
"env",
"=",
"yaml",
".",
"load",
"(",
"open",
"(",
"file",
")",
")",
"for",
"org",
"in",
"env",
".",
"get",
"(",
"'organizations'",
",",
"[",
"]",
")",
":",
"if",
"not",
"org",
".",
"get",
"(",
"'applications'",
")",
":",
"org",
"[",
"'applications'",
"]",
"=",
"[",
"]",
"if",
"org",
".",
"get",
"(",
"'starter-kit'",
")",
":",
"kit_meta",
"=",
"get_starter_kit_meta",
"(",
"org",
".",
"get",
"(",
"'starter-kit'",
")",
")",
"for",
"meta_app",
"in",
"get_applications_from_metadata",
"(",
"kit_meta",
")",
":",
"org",
"[",
"'applications'",
"]",
".",
"append",
"(",
"meta_app",
")",
"if",
"org",
".",
"get",
"(",
"'meta'",
")",
":",
"for",
"meta_app",
"in",
"get_applications_from_metadata",
"(",
"org",
".",
"get",
"(",
"'meta'",
")",
")",
":",
"org",
"[",
"'applications'",
"]",
".",
"append",
"(",
"meta_app",
")",
"for",
"app",
"in",
"org",
".",
"get",
"(",
"'applications'",
",",
"[",
"]",
")",
":",
"if",
"app",
".",
"get",
"(",
"'file'",
")",
":",
"app",
"[",
"'file'",
"]",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"file",
")",
",",
"app",
"[",
"'file'",
"]",
")",
")",
"return",
"env"
] | Generate environment used for 'org.restore' method
:param file: env file
:return: env | [
"Generate",
"environment",
"used",
"for",
"org",
".",
"restore",
"method",
":",
"param",
"file",
":",
"env",
"file",
":",
"return",
":",
"env"
] | train | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/tools/__init__.py#L198-L223 |
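
load_env() normalises the on-disk layout before org.restore consumes it: starter-kit
and meta references are expanded into extra application entries, and each
application's file path is made absolute relative to the env file's own directory.
A hedged example (file layout and contents invented):

# config/env.yaml:
#   organizations:
#   - name: demo-org
#     applications:
#     - name: hello
#       file: apps/hello.yml      # resolved relative to config/, per the code above
env = load_env("config/env.yaml")
# env['organizations'][0]['applications'][0]['file'] is now an absolute path, and
# any starter-kit/meta applications have been appended to 'applications'.
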