wbia.dtool package

Submodules

wbia.dtool.base module

class wbia.dtool.base.AlgoResult[source]

Bases: object

Base class for algo result objects

copy()[source]
classmethod load_from_fpath(fpath, verbose=False)[source]
save_to_fpath(fpath, verbose=False)[source]
class wbia.dtool.base.AnnotSimiliarity[source]

Bases: object

get_data_hashid()[source]
get_query_hashid()[source]
class wbia.dtool.base.BaseRequest[source]

Bases: wbia.dtool.base.IBEISRequestHacks, utool.util_dev.NiceRepr

Class that maintains an algorithm, its inputs, and a config.

ensure_dependencies()[source]
CommandLine:

python -m dtool.base --exec-BaseRequest.ensure_dependencies

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> request = depc.new_request('vsmany', [1, 2], [2, 3, 4])
>>> request.ensure_dependencies()
execute(parent_rowids=None, use_cache=None, postprocess=True)[source]
get_cfgstr(with_input=False, with_pipe=True, **kwargs)[source]

main cfgstring used to identify the 'querytype'

get_input_hashid()[source]
get_pipe_cfgstr()[source]
get_pipe_hashid()[source]
classmethod new(depc, parent_rowids, cfgdict=None, tablename=None)[source]
rrr(verbose=True, reload_module=True)

Special class reloading function. This function is often injected as rrr of classes.

static static_new(cls, depc, parent_rowids, cfgdict=None, tablename=None)[source]

hack for autoreload

class wbia.dtool.base.ClassVsClassSimilarityRequest[source]

Bases: wbia.dtool.base.BaseRequest

rrr(verbose=True, reload_module=True)

Special class reloading function. This function is often injected as rrr of classes.

class wbia.dtool.base.Config(**kwargs)[source]

Bases: utool.util_dev.NiceRepr, utool.util_dict.DictLike

Base class for hierarchical configs. Subclasses need to overwrite get_param_info_list.

CommandLine:

python -m dtool.base Config

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> cfg1 = Config.from_dict({'a': 1, 'b': 2})
>>> cfg2 = Config.from_dict({'a': 2, 'b': 2})
>>> # Must be hashable and orderable
>>> hash(cfg1)
>>> cfg1 > cfg2
assert_self_types(verbose=True)[source]
classmethod class_from_dict(dict_, tablename=None)[source]
deepcopy()[source]
classmethod from_argv_cfgs()[source]

handy command line tool

classmethod from_argv_dict(**kwargs)[source]

handy command line tool ut.parse_argv_cfg

classmethod from_dict(dict_, tablename=None)[source]
Parameters
  • dict_ (dict) – a dictionary

  • tablename (None) – (default = None)

Returns

param_info_list

Return type

list

CommandLine:

python -m dtool.base Config.from_dict --show

Example

>>> # DISABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> cls = Config
>>> dict_ = {'K': 1, 'Knorm': 5, 'min_pername': 1, 'max_pername': 1,}
>>> tablename = None
>>> config = cls.from_dict(dict_, tablename)
>>> print(config)
>>> # xdoctest: +REQUIRES(--show)
>>> ut.quit_if_noshow()
>>> dlg = config.make_qt_dialog(
>>>     title='Confirm Merge Query',
>>>     msg='Confirm')
>>> dlg.resize(700, 500)
>>> dlg.show()
>>> import wbia.plottool as pt
>>> self = dlg.widget
>>> guitool.qtapp_loop(qwin=dlg)
>>> updated_config = self.config  # NOQA
>>> print('updated_config = %r' % (updated_config,))
get(key, *d)[source]

Get a parameter value by string.

get_cfgstr(**kwargs)[source]
get_cfgstr_list(ignore_keys=None, with_name=True, **kwargs)[source]

Default get_cfgstr_list; can be overridden by a config object.

get_config_name(**kwargs)[source]

the user might want to overwrite this function

get_hashid()[source]
get_param_info_dict()[source]
get_param_info_list()[source]
get_sub_config_list()[source]
get_varnames()[source]
getinfo(key)[source]
getitem(key)[source]

Required for DictLike interface

getstate_todict_recursive()[source]
initialize_params(**kwargs)[source]

Initializes config class attributes based on params info list

keys()[source]

Required for DictLike interface

make_qt_dialog(parent=None, title='Edit Config', msg='Confim')[source]
native_items()[source]
nested_items()[source]
parse_items()[source]
Returns

param_list

Return type

list

CommandLine:

python -m dtool.base --exec-parse_items

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> from wbia.dtool.example_depcache import DummyVsManyConfig
>>> cfg = DummyVsManyConfig()
>>> param_list = cfg.parse_items()
>>> result = ('param_list = %s' % (ut.repr2(param_list, nl=1),))
>>> print(result)
parse_namespace_config_items()[source]

Recursively extracts key, val pairs from Config objects into a flat list. (There must not be name conflicts.)

pop_update(other)[source]

Updates based on other, while popping off used arguments. (Useful for testing if a parameter was unused or misspelled.)

Doctest:
>>> from wbia.dtool.base import *  # NOQA
>>> from wbia import dtool as dt
>>> cfg = dt.Config.from_dict({'a': 1, 'b': 2, 'c': 3})
>>> other = {'a': 5, 'e': 2}
>>> cfg.pop_update(other)
>>> assert cfg['a'] == 5
>>> assert len(other) == 1 and 'a' not in other
setitem(key, value)[source]

Required for DictLike interface

update(**kwargs)[source]

Overwrites the default DictLike update for only keys that exist. Non-existing keys are ignored.

Note

prefixed keys in the form <classname>_<key> will just be interpreted as <key>

CommandLine:

python -m dtool.base update --show

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> from wbia.dtool.example_depcache import DummyVsManyConfig
>>> cfg = DummyVsManyConfig()
>>> cfg.update(DummyAlgo_version=4)
>>> print(cfg)
update2(*args, **kwargs)[source]

Overwrites the default DictLike update for only keys that exist. Non-existing keys are ignored. Also updates nested configs.

Note

prefixed keys in the form <classname>_<key> will just be interpreted as <key>

CommandLine:

python -m dtool.base update --show

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> from wbia import dtool as dt
>>> cfg = dt.Config.from_dict({
>>>     'a': 1,
>>>     'b': 2,
>>>     'c': 3,
>>>     'sub1': dt.Config.from_dict({
>>>         'x': 'x',
>>>         'y': {'z', 'x'},
>>>         'c': 33,
>>>     }),
>>>     'sub2': dt.Config.from_dict({
>>>         's': [1, 2, 3],
>>>         't': (1, 2, 3),
>>>         'c': 42,
>>>         'sub3': dt.Config.from_dict({
>>>             'b': 99,
>>>             'c': 88,
>>>         }),
>>>     }),
>>> })
>>> kwargs = {'c': 10}
>>> cfg.update2(c=10, y={1,2})
>>> assert cfg.c == 10
>>> assert cfg.sub1.c == 10
>>> assert cfg.sub2.c == 10
>>> assert cfg.sub2.sub3.c == 10
>>> assert cfg.sub1.y == {1, 2}
class wbia.dtool.base.IBEISRequestHacks[source]

Bases: object

property dannots
property extern_data_config2
property extern_query_config2
get_qreq_annot_nids(aids)[source]
property ibs

HACK specific to wbia

property qannots
class wbia.dtool.base.MatchResult(qaid=None, daids=None, qnid=None, dnid_list=None, annot_score_list=None, unique_nids=None, name_score_list=None)[source]

Bases: wbia.dtool.base.AlgoResult, utool.util_dev.NiceRepr

property daids
property num_daids
property qaids
class wbia.dtool.base.StackedConfig(config_list)[source]

Bases: utool.util_dict.DictLike, utool.util_class.HashComparable

Manages a list of configurations

get_cfgstr()[source]
getitem(key)[source]
keys()[source]
class wbia.dtool.base.VsManySimilarityRequest[source]

Bases: wbia.dtool.base.BaseRequest, wbia.dtool.base.AnnotSimiliarity

Request for one-vs-many similarity

CommandLine:

python -m dtool.base --exec-VsManySimilarityRequest

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> qaid_list = [1, 2]
>>> daid_list = [2, 3, 4]
>>> depc = testdata_depc()
>>> request = depc.new_request('vsmany', qaid_list, daid_list)
>>> request.ensure_dependencies()
>>> results = request.execute()
>>> # Test dependence on data
>>> request2 = depc.new_request('vsmany', qaid_list + [3], daid_list + [5])
>>> results2 = request2.execute()
>>> print('results = %r' % (results,))
>>> print('results2 = %r' % (results2,))
>>> assert len(results) == 2, 'incorrect num output'
>>> assert len(results2) == 3, 'incorrect num output'
get_cfgstr(with_input=False, with_data=True, with_pipe=True, hash_pipe=False)[source]

Override default get_cfgstr to show reliance on data

get_input_hashid()[source]
classmethod new(depc, qaid_list, daid_list, cfgdict=None, tablename=None)[source]
rrr(verbose=True, reload_module=True)

Special class reloading function. This function is often injected as rrr of classes.

class wbia.dtool.base.VsOneSimilarityRequest[source]

Bases: wbia.dtool.base.BaseRequest, wbia.dtool.base.AnnotSimiliarity

Similarity request for pairwise scores

References

https://thingspython.wordpress.com/2010/09/27/another-super-wrinkle-raising-typeerror/

CommandLine:

python -m dtool.base --exec-VsOneSimilarityRequest

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.base import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> qaid_list = [1, 2, 3, 5]
>>> daid_list = [2, 3, 4]
>>> depc = testdata_depc()
>>> request = depc.new_request('vsone', qaid_list, daid_list)
>>> results = request.execute()
>>> # Test that adding a query / data id only recomputes necessary items
>>> request2 = depc.new_request('vsone', qaid_list + [4], daid_list + [5])
>>> results2 = request2.execute()
>>> print('results = %r' % (results,))
>>> print('results2 = %r' % (results2,))
>>> ut.assert_eq(len(results), 10, 'incorrect num output')
>>> ut.assert_eq(len(results2), 16, 'incorrect num output')
execute(parent_rowids=None, use_cache=None, postprocess=True, **kwargs)[source]

HACKY REIMPLEMENTATION

get_input_hashid()[source]
static make_parent_rowids(qaid_list, daid_list)[source]
classmethod new(depc, qaid_list, daid_list, cfgdict=None, tablename=None)[source]
property parent_rowids_T
rrr(verbose=True, reload_module=True)

Special class reloading function. This function is often injected as rrr of classes.

wbia.dtool.base.config_graph_subattrs(cfg, depc)[source]
wbia.dtool.base.from_param_info_list(param_info_list, tablename='Unnamed')[source]
wbia.dtool.base.make_configclass(dict_, tablename)[source]

Creates a custom config class from a dict
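A minimal usage sketch (not part of the original docstring; it mirrors the Config.from_dict doctests above and assumes the generated class can be instantiated with its default values):

from wbia.dtool.base import make_configclass
# hypothetical table name and parameter defaults
DummyConfig = make_configclass({'K': 1, 'Knorm': 5}, 'dummy')
cfg = DummyConfig()
print(cfg.get_cfgstr())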

wbia.dtool.base.safeop(op_, xs, *args, **kwargs)[source]

wbia.dtool.depcache_control module

Implicit version of the dependency cache from wbia/templates/template_generator

class wbia.dtool.depcache_control.DependencyCache(controller, name, get_root_uuid, table_name=None, root_getters=None, use_globals=True)[source]

Bases: object

check_rowids(tablename, input_tuple, config={})[source]

Returns a list of flags where True means the row has been computed and False means that it needs to be computed.
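An illustrative sketch (not an original doctest; it uses the example depcache and assumes a plain list of root rowids is accepted as the input_tuple, as in the get_rowids examples below):

from wbia.dtool.example_depcache import testdata_depc
depc = testdata_depc()
flags = depc.check_rowids('chip', [1, 2, 3])   # e.g. [False, False, False] before computation
depc.get('chip', [1, 2, 3])                    # computes and caches the chips
flags = depc.check_rowids('chip', [1, 2, 3])   # e.g. [True, True, True] afterwards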

clear_all()[source]
close()[source]

Close all managed SQL databases

delete_property(tablename, root_rowids, config=None, _debug=False)[source]

Deletes the rowids of tablename that correspond to root_rowids using config.

FIXME: make this work for all configs

delete_property_all(tablename, root_rowids, _debug=False)[source]

Deletes the rowids of tablename that correspond to root_rowids using config.

FIXME: make this work for all configs

delete_root(root_rowids, delete_extern=None, _debug=False, table_config_filter=None, prop=None)[source]

Deletes all properties of a root object regardless of config

Parameters

root_rowids (list) –

CommandLine:

python -m dtool.depcache_control delete_root --show

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> exec(ut.execstr_funckw(depc.delete_root), globals())
>>> root_rowids = [1]
>>> depc.delete_root(root_rowids)
>>> depc.get('fgweight', [1])
>>> depc.delete_root(root_rowids)
property explicit_graph
get(tablename, root_rowids, colnames=None, config=None, ensure=True, _debug=None, recompute=False, recompute_all=False, eager=True, nInput=None, read_extern=True, onthefly=False, num_retries=3, retry_delay_min=1, retry_delay_max=3, hack_paths=False)[source]

Access dependent properties of the primary objects using primary ids.

Gets the data in colnames of tablename that correspond to root_rowids using config. If colnames is None, all columns are returned.

Parameters
  • tablename (str) – table name containing desired property

  • root_rowids (List[int]) – ids of the root object

  • colnames (None) – desired property (default = None)

  • config (None) – (default = None)

  • read_extern – if False then only returns extern URI

  • hack_paths – if False then does not compute extern info, just returns the path that it will be located at

Returns

prop_list

Return type

list

CommandLine:

python -m dtool.depcache_control --exec-get

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = testdata_depc3(True)
>>> exec(ut.execstr_funckw(depc.get), globals())
>>> aids = [1, 2, 3]
>>> tablename = 'labeler'
>>> root_rowids = aids
>>> prop_list = depc.get(
>>>     tablename, root_rowids, colnames)
>>> result = ('prop_list = %s' % (ut.repr2(prop_list),))
>>> print(result)
prop_list = [('labeler([root(1)]:42)',), ('labeler([root(2)]:42)',), ('labeler([root(3)]:42)',)]

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = testdata_depc3(True)
>>> exec(ut.execstr_funckw(depc.get), globals())
>>> aids = [1, 2, 3]
>>> tablename = 'smk_match'
>>> tablename = 'vocab'
>>> table = depc[tablename]
>>> root_rowids = [aids]
>>> prop_list = depc.get(
>>>     tablename, root_rowids, colnames, config)
>>> result = ('prop_list = %s' % (ut.repr2(prop_list),))
>>> print(result)
prop_list = [('vocab([root(1;2;3)]:42)',)]

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = testdata_depc3(True)
>>> exec(ut.execstr_funckw(depc.get), globals())
>>> aids = [1, 2, 3]
>>> depc = testdata_depc()
>>> tablename = 'chip'
>>> table = depc[tablename]
>>> root_rowids = aids
>>> # Ensure chips are computed
>>> prop_list1 = depc.get(tablename, root_rowids)
>>> # Get file paths and delete them
>>> prop_list2 = depc.get(tablename, root_rowids, read_extern=False)
>>> n = ut.remove_file_list(ut.take_column(prop_list2, 1))
>>> assert n == len(prop_list2), 'files were not computed'
>>> prop_list3 = depc.get(tablename, root_rowids)
>>> assert np.all(prop_list1[0][1] == prop_list3[0][1]), 'computed same info'
get_allconfig_descendant_rowids(root_rowids, table_config_filter=None)[source]
get_ancestor_rowids(tablename, native_rowids, ancestor_tablename=None)[source]

ancestor_tablename = depc.root; native_rowids = cid_list; tablename = const.CHIP_TABLE

get_config_history(tablename, root_rowids, config=None)[source]
get_config_trail(tablename, config)[source]
get_config_trail_str(tablename, config)[source]
get_db_by_name(name)[source]

Get the database (i.e. SQLController) for the given database name

get_dependencies(tablename)[source]

Gets level dependencies from root to tablename

CommandLine:

python -m dtool.depcache_control --exec-get_dependencies

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> tablename = 'fgweight'
>>> result = ut.repr3(depc.get_dependencies(tablename), nl=1)
>>> print(result)
[
    ['dummy_annot'],
    ['chip', 'probchip'],
    ['keypoint'],
    ['fgweight'],
]
get_edges(data=False)[source]

edges for networkx structure

get_implicit_edges(data=False)[source]

Edges defined by subconfigurations

get_native(tablename, tbl_rowids, colnames=None, _debug=None, read_extern=True)[source]

Gets data using internal ids, which is faster if you have them.

CommandLine:

python -m dtool.depcache_control get_native:0
python -m dtool.depcache_control get_native:1

Example

>>> # ENABLE_DOCTEST
>>> # Simple test of get native
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> config = {}
>>> depc = testdata_depc()
>>> tablename = 'keypoint'
>>> aids = [1,]
>>> tbl_rowids = depc.get_rowids(tablename, aids, config=config)
>>> data = depc.get_native(tablename, tbl_rowids)

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = testdata_depc()
>>> config = {}
>>> tablename = 'chip'
>>> colnames = extern_colname = 'chip'
>>> aids = [1, 2]
>>> depc.delete_property(tablename, aids, config=config)
>>> # Ensure chip rowids exist then delete external data without
>>> # notifying the depcache. This forces the depcache to recover
>>> tbl_rowids = chip_rowids = depc.get_rowids(tablename, aids, config=config)
>>> data_fpaths = depc.get(tablename, aids, extern_colname, config=config, read_extern=False)
>>> ut.remove_file_list(data_fpaths)
>>> chips = depc.get_native(tablename, tbl_rowids, extern_colname)
>>> print('chips = %r' % (chips,))
get_native_property(tablename, tbl_rowids, colnames=None, _debug=None, read_extern=True)

Gets data using internal ids, which is faster if you have them.

CommandLine:

python -m dtool.depcache_control get_native:0
python -m dtool.depcache_control get_native:1

Example

>>> # ENABLE_DOCTEST
>>> # Simple test of get native
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> config = {}
>>> depc = testdata_depc()
>>> tablename = 'keypoint'
>>> aids = [1,]
>>> tbl_rowids = depc.get_rowids(tablename, aids, config=config)
>>> data = depc.get_native(tablename, tbl_rowids)

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = testdata_depc()
>>> config = {}
>>> tablename = 'chip'
>>> colnames = extern_colname = 'chip'
>>> aids = [1, 2]
>>> depc.delete_property(tablename, aids, config=config)
>>> # Ensure chip rowids exist then delete external data without
>>> # notifying the depcache. This forces the depcache to recover
>>> tbl_rowids = chip_rowids = depc.get_rowids(tablename, aids, config=config)
>>> data_fpaths = depc.get(tablename, aids, extern_colname, config=config, read_extern=False)
>>> ut.remove_file_list(data_fpaths)
>>> chips = depc.get_native(tablename, tbl_rowids, extern_colname)
>>> print('chips = %r' % (chips,))
get_parent_rowids(target_tablename, input_tuple, config=None, **kwargs)[source]

Returns the parent rowids needed to get / compute a property of tablename

Parameters

input_tuple

To be explicit, send input in as a tuple of lists. Each list corresponds to the parent information needed by an expanded rmi (root most input).

Each item in the tuple corresponds to a root most node, and should be specified as a list of inputs. For single items this is a scalar, for multi-items it is a list.

For example, if you have a property like a chip that depends on only one parent, then to get the chips for the first N annotations your input tuple is:

input_tuple = ([1, 2, 3, …, N],)

For a single multi-input: if you want to get two vocabs, one for even and one for odd annots, then you have:

([[0, 2, 4, …], [1, 3, 5, …]],)

For a comparison against multi-inputs: if you want to query the first N annots against two vocabs then you have:

([1, 2, 3, …, N], [[0, 2, 4, …], [1, 3, 5, …]],)

(Note: this only works if broadcasting is on.)
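The shapes described above can be written out concretely (an illustrative sketch; the rowid values are arbitrary):

# one-to-one parent (e.g. chips for the first annots): a single flat list
input_tuple = ([1, 2, 3],)
# a single multi-input parent (e.g. one vocab per annot group): a list of rowid groups
input_tuple = ([[0, 2, 4], [1, 3, 5]],)
# query annots compared against two vocabs (requires broadcasting)
input_tuple = ([1, 2, 3], [[0, 2, 4], [1, 3, 5]])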

get_property(tablename, root_rowids, colnames=None, config=None, ensure=True, _debug=None, recompute=False, recompute_all=False, eager=True, nInput=None, read_extern=True, onthefly=False, num_retries=3, retry_delay_min=1, retry_delay_max=3, hack_paths=False)

Access dependent properties of the primary objects using primary ids.

Gets the data in colnames of tablename that correspond to root_rowids using config. If colnames is None, all columns are returned.

Parameters
  • tablename (str) – table name containing desired property

  • root_rowids (List[int]) – ids of the root object

  • colnames (None) – desired property (default = None)

  • config (None) – (default = None)

  • read_extern – if False then only returns extern URI

  • hack_paths – if False then does not compute extern info, just returns the path that it will be located at

Returns

prop_list

Return type

list

CommandLine:

python -m dtool.depcache_control --exec-get

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = testdata_depc3(True)
>>> exec(ut.execstr_funckw(depc.get), globals())
>>> aids = [1, 2, 3]
>>> tablename = 'labeler'
>>> root_rowids = aids
>>> prop_list = depc.get(
>>>     tablename, root_rowids, colnames)
>>> result = ('prop_list = %s' % (ut.repr2(prop_list),))
>>> print(result)
prop_list = [('labeler([root(1)]:42)',), ('labeler([root(2)]:42)',), ('labeler([root(3)]:42)',)]

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = testdata_depc3(True)
>>> exec(ut.execstr_funckw(depc.get), globals())
>>> aids = [1, 2, 3]
>>> tablename = 'smk_match'
>>> tablename = 'vocab'
>>> table = depc[tablename]
>>> root_rowids = [aids]
>>> prop_list = depc.get(
>>>     tablename, root_rowids, colnames, config)
>>> result = ('prop_list = %s' % (ut.repr2(prop_list),))
>>> print(result)
prop_list = [('vocab([root(1;2;3)]:42)',)]

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = testdata_depc3(True)
>>> exec(ut.execstr_funckw(depc.get), globals())
>>> aids = [1, 2, 3]
>>> depc = testdata_depc()
>>> tablename = 'chip'
>>> table = depc[tablename]
>>> root_rowids = aids
>>> # Ensure chips are computed
>>> prop_list1 = depc.get(tablename, root_rowids)
>>> # Get file paths and delete them
>>> prop_list2 = depc.get(tablename, root_rowids, read_extern=False)
>>> n = ut.remove_file_list(ut.take_column(prop_list2, 1))
>>> assert n == len(prop_list2), 'files were not computed'
>>> prop_list3 = depc.get(tablename, root_rowids)
>>> assert np.all(prop_list1[0][1] == prop_list3[0][1]), 'computed same info'
get_root_rowids(tablename, native_rowids)[source]
Parameters
  • tablename (str) –

  • native_rowids (list) –

Returns

Return type

list

CommandLine:

python -m dtool.depcache_control get_root_rowids --show

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = testdata_depc()
>>> config1 = {'adapt_shape': False}
>>> config2 = {'adapt_shape': True}
>>> root_rowids = [2, 3, 5, 7]
>>> native_rowids1 = depc.get_rowids('keypoint', root_rowids, config=config1)
>>> native_rowids2 = depc.get_rowids('keypoint', root_rowids, config=config2)
>>> ancestor_rowids1 = list(depc.get_root_rowids('keypoint', native_rowids1))
>>> ancestor_rowids2 = list(depc.get_root_rowids('keypoint', native_rowids2))
>>> assert native_rowids1 != native_rowids2, 'should have different native rowids'
>>> assert ancestor_rowids1 == root_rowids, 'should have same root'
>>> assert ancestor_rowids2 == root_rowids, 'should have same root'
get_rowids(tablename, input_tuple, **rowid_kw)[source]

Used to get tablename rowids. Ensures rows exist unless ensure=False. Rowids uniquely specify parent inputs and a configuration.

CommandLine:

python -m dtool.depcache_control get_rowids --show
python -m dtool.depcache_control get_rowids:1

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> depc = testdata_depc3(True)
>>> exec(ut.execstr_funckw(depc.get), globals())
>>> kwargs = {}
>>> root_rowids = [1, 2, 3]
>>> root_rowids2 = [(4, 5, 6, 7)]
>>> root_rowids3 = root_rowids2
>>> tablename = 'smk_match'
>>> input_tuple = (root_rowids, root_rowids2, root_rowids3)
>>> target_table = depc[tablename]
>>> inputs = target_table.rootmost_inputs.total_expand()
>>> depc.get_rowids(tablename, input_tuple)
>>> depc.print_all_tables()

Example

>>> # ENABLE_DOCTEST
>>> # Test external / ensure getters
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> config = {}
>>> depc = testdata_depc()
>>> aids = [1,]
>>> depc.delete_property('keypoint', aids, config=config)
>>> chip_fpaths = depc.get('chip', aids, 'chip', config=config, read_extern=False)
>>> ut.remove_file_list(chip_fpaths)
>>> rowids = depc.get_rowids('keypoint', aids, ensure=True, config=config)
>>> print('rowids = %r' % (rowids,))

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = testdata_depc()
>>> depc.clear_all()
>>> root_rowids = [1, 2]
>>> config = {}
>>> # Recompute the first few, make sure the rowids do not change
>>> _ = depc.get_rowids('chip', root_rowids + [3], config=config)
>>> assert _ == [1, 2, 3]
>>> initial_rowids = depc.get_rowids('chip', root_rowids, config=config)
>>> recomp_rowids = depc.get_rowids('chip', root_rowids, config=config, recompute=True)
>>> assert recomp_rowids == initial_rowids, 'rowids should not change due to recompute'
get_tablenames()[source]
get_uuids(tablename, root_rowids, config=None)[source]

TODO: Make uuids for dependent objects based on the root uuid and the path of construction.

property graph
initialize(_debug=None)[source]

Creates all registered tables

make_graph(**kwargs)[source]

Constructs a networkx representation of the dependency graph

CommandLine:

python -m dtool --tf DependencyCache.make_graph --show --reduced

python -m wbia.control.IBEISControl show_depc_annot_graph --show --reduced

python -m wbia.control.IBEISControl show_depc_annot_graph --show --reduced --testmode
python -m wbia.control.IBEISControl show_depc_annot_graph --show --testmode

python -m wbia.control.IBEISControl --test-show_depc_image_graph --show --reduced
python -m wbia.control.IBEISControl --test-show_depc_image_graph --show

python -m wbia.scripts.specialdraw double_depcache_graph --show --testmode

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> import utool as ut
>>> depc = testdata_depc()
>>> graph = depc.make_graph(reduced=ut.get_argflag('--reduced'))
>>> ut.quit_if_noshow()
>>> import wbia.plottool as pt
>>> pt.ensureqt()
>>> import networkx as nx
>>> #pt.show_nx(nx.dag.transitive_closure(graph))
>>> #pt.show_nx(ut.nx_transitive_reduction(graph))
>>> pt.show_nx(graph)
>>> pt.show_nx(graph, layout='agraph')
>>> ut.show_if_requested()

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> import utool as ut
>>> depc = testdata_depc()
>>> graph = depc.make_graph(reduced=True)
>>> # xdoctest: +REQUIRES(--show)
>>> ut.quit_if_noshow()
>>> import wbia.plottool as pt
>>> pt.ensureqt()
>>> import networkx as nx
>>> #pt.show_nx(nx.dag.transitive_closure(graph))
>>> #pt.show_nx(ut.nx_transitive_reduction(graph))
>>> pt.show_nx(graph)
>>> pt.show_nx(graph, layout='agraph')
>>> ut.show_if_requested()
make_root_info_uuid(root_rowids, info_props)[source]

Creates a uuid that depends on certain properties of the root object. This is used for implicit cache invalidation because if those properties change then this uuid also changes.

The depcache needs to know about stateful properties of dynamic root objects in order to correctly compute their hashes.

>>> #ibs = wbia.opendb(defaultdb='testdb1')
>>> root_rowids = ibs._get_all_aids()
>>> depc = ibs.depc_annot
>>> info_props = ['image_uuid', 'verts', 'theta']
>>> info_props = ['image_uuid', 'verts', 'theta', 'name', 'species', 'yaw']
new_request(tablename, qaids, daids, cfgdict=None)[source]

Creates a request for data that can be executed later.
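An illustrative sketch based on the BaseRequest and VsManySimilarityRequest doctests above:

from wbia.dtool.example_depcache import testdata_depc
depc = testdata_depc()
request = depc.new_request('vsmany', [1, 2], [2, 3, 4])  # qaids, daids
results = request.execute()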

notify_root_changed(root_rowids, prop, force_delete=False)[source]

This is where we are notified that a "registered" root property has changed.

print_all_tables()[source]
print_config_tables()[source]
print_schemas()[source]
print_table(tablename)[source]
rectify_input_tuple(exi_inputs, input_tuple)[source]

Standardizes inputs allowed for convenience into the expected input for get_parent_rowids.

property reduced_graph
register_delete_table_exclusion(tablename, prop)[source]
register_preproc(*args, **kwargs)[source]

Decorator for registration of cachables

Parameters
  • tablename (str) – name of the node (corresponds to SQL table)

  • parents (list) – tables this node depends on

  • colnames (list) – data returned by this table

  • coltypes (list) – types of data returned by this table

  • chunksize (int) – (default = None)

  • configclass (dtool.TableConfig) – derivative of dtool.TableConfig. If None, a default class will be constructed for you. (default = None)

  • docstr (str) – (default = None)

  • fname (str) – file name (default = None)

  • asobject (bool) – hacky, don't use (default = False)

SeeAlso:

depcache_table.DependencyCacheTable
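A hedged sketch of a registration (not from the original docstring; the keyword names follow the Parameters list above, and the worker signature follows the pattern used by wbia.dtool.example_depcache, so details may differ):

import numpy as np
# 'depc' is assumed to be an existing DependencyCache instance
@depc.register_preproc(
    tablename='dummy_notch',        # hypothetical node / SQL table name
    parents=['dummy_annot'],
    colnames=['notch_data'],
    coltypes=[np.ndarray],
)
def compute_dummy_notch(depc, parent_rowids, config=None):
    # yield one tuple of column values per parent rowid
    for rowid in parent_rowids:
        yield (np.zeros(3),)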

property root
show_graph(reduced=False, **kwargs)[source]

Helper "fluff" function

stacked_config(source, dest, config)[source]
CommandLine:

python -m dtool.depcache_control stacked_config --show

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> source = depc.root
>>> dest = 'fgweight'
>>> config = {}
>>> stacked_config = depc.stacked_config(source, dest, config)
>>> cfgstr = stacked_config.get_cfgstr()
>>> result = ('cfgstr = %s' % (ut.repr2(cfgstr),))
>>> print(result)
property tablenames
property tables
wbia.dtool.depcache_control.check_register(args, kwargs)[source]
wbia.dtool.depcache_control.make_depcache_decors(root_tablename)[source]

Makes global decorators to register functions for a tablename.

A preproc function is meant to belong only to a single parent. An algo function belongs to the root node, and may depend on a set of root nodes rather than just a single one.

wbia.dtool.depcache_table module

Module containing DependencyCacheTable

python -m dtool.depcache_control --exec-make_graph --show
python -m dtool.depcache_control --exec-make_graph --show --reduce

FIXME:
RECTIFY: ismulti / ismodel need to be rectified. These indicate that this table receives multiple inputs from at least one parent table.

RECTIFY: Need to standardize parent rowids -vs- parent args.

In one-to-one cases they are the same. In multi cases the rowids indicate a uuid and the args are the saved set of rowids that exist in the manifest.

RECTIFY: is rowid_list row-major or column-major?

I think currently rowid_list is row-major and rowid_listT is column-major but this may not be consistent.

class wbia.dtool.depcache_table.DependencyCacheTable(depc=None, parent_tablenames=None, tablename=None, data_colnames=None, data_coltypes=None, preproc_func=None, docstr='no docstr', fname=None, asobject=False, chunksize=None, isinteractive=False, default_to_unpack=False, default_onthefly=False, rm_extern_on_delete=False, vectorized=True, taggable=False)[source]

Bases: wbia.dtool.depcache_table._TableGeneralHelper, wbia.dtool.depcache_table._TableInternalSetup, wbia.dtool.depcache_table._TableDebugHelper, wbia.dtool.depcache_table._TableComputeHelper, wbia.dtool.depcache_table._TableConfigHelper

An individual node in the dependency graph.

All SQL column information is stored in:

internal_col_attrs - keeps track of internal info

Additional metadata about specific columns is stored in

parent_col_attrs - keeps track of parent info
data_col_attrs - keeps track of computed data

db

pointer to underlying database

Type

dtool.SQLDatabaseController

depc

pointer to parent cache

Type

dtool.DependencyCache

tablename

name of the table

Type

str

docstr

documentation for table

Type

str

parent_tablenames

parent tables in depcache

Type

str

data_colnames

columns produced by preproc_func

Type

List[str]

data_coltypes

column SQL types produced by preproc_func

Type

List[str]

preproc_func

worker function

Type

func

vectorized

By default, it is assumed registered functions can process multiple inputs at once.

Type

bool

taggable

Specifies whether a computed object can be disconnected from its ancestors and accessed via a tag.

Type

bool

CommandLine:

python -m dtool.depcache_table --exec-DependencyCacheTable

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_table import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> print(depc['vsmany'])
>>> print(depc['spam'])
>>> print(depc['vsone'])
>>> print(depc['nnindexer'])
clear_table()[source]

Deletes all data in this table

delete_rows(rowid_list, delete_extern=None, dry=False, verbose=None)[source]
CommandLine:

python -m dtool.depcache_table --exec-delete_rows

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_table import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> #table = depc['keypoint']
>>> table = depc['chip']
>>> exec(ut.execstr_funckw(table.delete_rows), globals())
>>> tablename = table.tablename
>>> graph = depc.explicit_graph
>>> config1 = None
>>> config2 = table.configclass(version=-1)
>>> config3 = table.configclass(version=-1, ext='.jpg')
>>> config4 = table.configclass(ext='.jpg')
>>> # Create several configs of rowid
>>> aids = [1, 2, 3]
>>> depc.get_rowids('spam', aids, config=config1)
>>> depc.get_rowids('spam', aids, config=config2)
>>> depc.get_rowids('spam', aids, config=config3)
>>> depc.get_rowids('spam', aids, config=config4)
>>> # Delete the png configs
>>> rowid_list1 = depc.get_rowids(table.tablename, aids,
>>>                               config=config2)
>>> rowid_list2 = depc.get_rowids(table.tablename, aids,
>>>                               config=config1)
>>> rowid_list = rowid_list1 + rowid_list2
>>> assert len(ut.setintersect_ordered(rowid_list1, rowid_list2)) == 0
>>> table.delete_rows(rowid_list)
ensure_rows(parent_ids_, preproc_args, config=None, verbose=True, _debug=None, retry=3, retry_delay_min=1, retry_delay_max=10)[source]

Lazy addition

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_table import *  # NOQA
>>> from wbia.dtool.example_depcache2 import testdata_depc3
>>> depc = testdata_depc3()
>>> table = depc['vsone']
>>> exec(ut.execstr_funckw(table.get_rowid), globals())
>>> config = table.configclass()
>>> verbose = True
>>> # test duplicate inputs are detected and accounted for
>>> parent_rowids = [(i, i) for i in list(range(100))] * 100
>>> rectify_tup = table._rectify_ids(parent_rowids)
>>> (parent_ids_, preproc_args, idxs1, idxs2) = rectify_tup
>>> rowids = table.ensure_rows(parent_ids_, preproc_args, config=config)
>>> result = ('rowids = %r' % (rowids,))
>>> print(result)
export_rows(rowid, target)[source]

The goal of this is to export taggable data that can be used independently of its dependent features.

TODO List:
  • Gather information about columns
    • Native and (localized) external data
      • <table>_rowid - non-transferable

      • Parent UUIDS - non-transferable

      • config rowid - non-transferable

      • model_uuid -

      • augment_bit - transferable - trivial

      • words_extern_uri - copy to destination

      • feat_setsize - transferable - trivial

      • model_tag

    • Should also gather info from manifest:
      • feat_setuuid_primary_ids - non-transferable

      • feat_setuuid_model_input - non-transferable

    • Should gather exhaustive config history

  • Save to disk

  • Add function to reload data in exported format

  • Getters should be able to specify a tag in place of the root input for the tagged data. Additionally, native root-ids should also be allowed.

rowid = 1

property fname

Backwards compatible name of the database this Table belongs to

classmethod from_name(db_name, table_name, depcache_controller, parent_tablenames=None, data_colnames=None, data_coltypes=None, preproc_func=None, docstr='no docstr', asobject=False, chunksize=None, default_to_unpack=False, rm_extern_on_delete=False, vectorized=True, taggable=False)[source]

Build the instance based on a database and table name.

get_internal_columns(tbl_rowids, colnames=None, eager=True, nInput=None, unpack_scalars=True, keepwrap=False, showprog=False)[source]

Access data in this table using the table PRIMARY KEY rowids (not depc PRIMARY ids)

get_row_data(tbl_rowids, colnames=None, _debug=None, read_extern=True, num_retries=1, eager=True, nInput=None, ensure=True, delete_on_fail=True, showprog=False, unpack_columns=None)[source]

FIXME: unpacking is confusing with the sql controller. TODO: Clean up and allow for eager=False.

colnames = ('mask', 'size')

CommandLine:

python -m dtool.depcache_table --test-get_row_data:0
python -m dtool.depcache_table --test-get_row_data:1

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_table import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> table = depc['chip']
>>> exec(ut.execstr_funckw(table.get_row_data), globals())
>>> tbl_rowids = depc.get_rowids('chip', [1, 2, 3], _debug=True, recompute=True)
>>> colnames = ('size_1', 'size', 'chip' + EXTERN_SUFFIX, 'chip')
>>> kwargs = dict(read_extern=True, num_retries=1, _debug=True)
>>> prop_list = table.get_row_data(tbl_rowids, colnames, **kwargs)
>>> prop_list0 = ut.take_column(prop_list, [0, 1, 2]) # data subset
>>> result = (ut.repr2(prop_list0, nl=1))
>>> print(result)
>>> #_debug, num_retries, read_extern = True, 1, True
>>> prop_gen = table.get_row_data(tbl_rowids, colnames, eager=False)
>>> prop_list2 = list(prop_gen)
>>> assert len(prop_list2) == len(prop_list), 'inconsistent lens'
>>> assert all([ut.lists_eq(prop_list2[1], prop_list[1]) for x in range(len(prop_list))]), 'inconsistent vals'
>>> chips = table.get_row_data(tbl_rowids, 'chip', eager=False)
[
    [2453, (1707, 2453), 'chip_chip_id=1_pyrappzicqoskdjq.png'],
    [250, (300, 250), 'chip_chip_id=2_pyrappzicqoskdjq.png'],
    [372, (545, 372), 'chip_chip_id=3_pyrappzicqoskdjq.png'],
]

Example

>>> # ENABLE_DOCTEST
>>> # Test external / ensure getters
>>> from wbia.dtool.example_depcache import *  # NOQA
>>> depc = testdata_depc()
>>> table = depc['chip']
>>> exec(ut.execstr_funckw(table.get_row_data), globals())
>>> depc.clear_all()
>>> config = {}
>>> aids = [1,]
>>> read_extern = False
>>> tbl_rowids = depc.get_rowids('chip', aids, config=config)
>>> data_fpaths = depc.get('chip', aids, 'chip', config=config, read_extern=False)
>>> # Ensure data is recomputed if an external file is missing
>>> ut.remove_fpaths(data_fpaths)
>>> data = table.get_row_data(tbl_rowids, 'chip', read_extern=False, ensure=False)
>>> data = table.get_row_data(tbl_rowids, 'chip', read_extern=False, ensure=True)
get_rowid(parent_rowids, config=None, ensure=True, eager=True, nInput=None, recompute=False, _debug=None, num_retries=1)[source]

Returns the rowids of derived properties. If they do not exist it computes them.

Parameters
  • parent_rowids (list) – list of tuples with the parent rowids as the value of each tuple

  • config (None) – (default = None)

  • ensure (bool) – eager evaluation if True (default = True)

  • eager (bool) – (default = True)

  • nInput (int) – (default = None)

  • recompute (bool) – (default = False)

  • _debug (None) – (default = None) deprecated; no-op

Returns

rowid_list

Return type

list

CommandLine:

python -m dtool.depcache_table --exec-get_rowid

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.depcache_table import *  # NOQA
>>> from wbia.dtool.example_depcache2 import testdata_depc3
>>> depc = testdata_depc3()
>>> table = depc['labeler']
>>> exec(ut.execstr_funckw(table.get_rowid), globals())
>>> config = table.configclass()
>>> parent_rowids = list(zip([1, None, None, 2]))
>>> rowids = table.get_rowid(parent_rowids, config=config)
>>> result = ('rowids = %r' % (rowids,))
>>> print(result)
rowids = [1, None, None, 2]
initialize(_debug=None)[source]

Ensures the SQL schema for this cache table

property number_of_rows
rrr(verbose=True, reload_module=True)

Special class reloading function. This function is often injected as rrr of classes.

class wbia.dtool.depcache_table.ExternType(read_func, write_func, extern_ext=None, extkey=None)[source]

Bases: ubelt.util_mixins.NiceRepr

Type to denote an external resource not saved in an SQL table
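A hedged construction sketch based on the signature above (the read/write callables are hypothetical; dtool is assumed to call them roughly as data = read_func(fpath) and write_func(fpath, data)):

def _read_text(fpath):
    with open(fpath, 'r') as file_:
        return file_.read()

def _write_text(fpath, data):
    with open(fpath, 'w') as file_:
        file_.write(data)

text_coltype = ExternType(_read_text, _write_text, extern_ext='.txt')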

exception wbia.dtool.depcache_table.ExternalStorageException(*args, **kwargs)[source]

Bases: Exception

Indicates a missing external file

exception wbia.dtool.depcache_table.TableOutOfSyncError(db, tablename, extended_msg)[source]

Bases: Exception

Raised when the code's table definition doesn't match the definition in the database

wbia.dtool.depcache_table.ensure_config_table(db)[source]

SQL definition of configuration table.

wbia.dtool.depcache_table.make_extern_io_funcs(table, cls)[source]

Hack in read/write defaults for pickleable classes

wbia.dtool.depcache_table.predrop_grace_period(tablename, seconds=None)[source]

Hack that gives the user some time to abort deleting everything

wbia.dtool.example_depcache module

wbia.dtool.example_depcache2 module

wbia.dtool.input_helpers module

class wbia.dtool.input_helpers.BranchId(accum_ids, k, parent_colx)[source]

Bases: utool.util_class.HashComparable

class wbia.dtool.input_helpers.ExiNode(node_id, branch_id)[source]

Bases: utool.util_class.HashComparable

Expanded Input Node

helps distinguish nodes and branch_ids

property branch_id
property node_id
class wbia.dtool.input_helpers.RootMostInput(node, sink, exi_graph)[source]

Bases: utool.util_class.HashComparable

compute_order()[source]

Returns order of computation from this input node to the sink

property ismulti
parent_level()[source]

Returns rootmost inputs above this node

Example

>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> depc = testdata_depc4()
>>> inputs = depc['smk_match'].rootmost_inputs
>>> rmi = inputs.rmi_list[1]
>>> assert len(rmi.parent_level()) == 2
class wbia.dtool.input_helpers.TableInput(rmi_list, exi_graph, table, reorder=False)[source]

Bases: utool.util_dev.NiceRepr

Specifies a set of inputs that can validly compute the output of a table in the dependency graph

exi_nodes()[source]
expand_input(index, inplace=False)[source]

Pushes the rootmost inputs all the way up to the sources of the graph

CommandLine:

python -m dtool.input_helpers expand_input

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.input_helpers import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> depc = testdata_depc4()
>>> inputs = depc['smk_match'].rootmost_inputs
>>> inputs = depc['neighbs'].rootmost_inputs
>>> print('(pre-expand)  inputs  = %r' % (inputs,))
>>> index = 'indexer'
>>> inputs2 = inputs.expand_input(index)
>>> print('(post-expand) inputs2 = %r' % (inputs2,))
>>> assert 'indexer' in str(inputs), 'missing indexer1'
>>> assert 'indexer' not in str(inputs2), (
>>>     '(2) unexpected indexer in %s' % (inputs2,))
expected_input_depth()[source]

Example

>>> # DISABLE_DOCTEST
>>> from wbia.dtool.input_helpers import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> depc = testdata_depc4()
>>> inputs = depc['neighbs'].rootmost_inputs
>>> index = 'indexer'
>>> inputs = inputs.expand_input(index)
>>> size = inputs.expected_input_depth()
>>> print('size = %r' % (size,))
>>> inputs = depc['feat'].rootmost_inputs
>>> size = inputs.expected_input_depth()
>>> print('size = %r' % (size,))
flat_compute_order()[source]

This is basically the scheduler

Todo

We need to verify the correctness of this logic. It seems to not be deterministic between versions of python.

CommandLine:

python -m dtool.input_helpers flat_compute_order

Example

>>> # xdoctest: +REQUIRES(--fixme)
>>> from wbia.dtool.input_helpers import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> depc = testdata_depc4()
>>> inputs = depc['feat'].rootmost_inputs.total_expand()
>>> flat_compute_order = inputs.flat_compute_order()
>>> result = ut.repr2(flat_compute_order)
...
>>> print(result)
[chip[t, t:1, 1:1], probchip[t, t:1, 1:1], feat[t, t:1]]
flat_compute_rmi_edges()[source]

Defines order of computation that maps input_ids to target_ids.

CommandLine:

python -m dtool.input_helpers flat_compute_rmi_edges

Returns

compute_edges
Each item is a tuple of input/output RootMostInputs

([parent_1, …, parent_n], node_i)

All parents should be known before you reach the i-th item in the list. Results of the i-th item may be used in subsequent item computations.

Return type

list

Example

>>> from wbia.dtool.input_helpers import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> depc = testdata_custom_annot_depc([
...    dict(tablename='chips', parents=['annot']),
...    dict(tablename='Notch_Tips', parents=['annot']),
...    dict(tablename='Cropped_Chips', parents=['chips', 'Notch_Tips']),
... ])
>>> table = depc['Cropped_Chips']
>>> inputs = exi_inputs = table.rootmost_inputs.total_expand()
>>> compute_rmi_edges = exi_inputs.flat_compute_rmi_edges()
>>> input_rmis = compute_rmi_edges[-1][0]
>>> result = ut.repr2(input_rmis)
>>> print(result)
[chips[t, t:1, 1:1], Notch_Tips[t, t:1, 1:1]]
is_single_inputs()[source]
rrr(verbose=True, reload_module=True)

Special class reloading function. This function is often injected as rrr of classes.

show_exi_graph(inter=None)[source]
CommandLine:

python -m dtool.input_helpers TableInput.show_exi_graph --show

Example

>>> # DISABLE_DOCTEST
>>> from wbia.dtool.input_helpers import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> depc = testdata_depc3()
>>> # table = depc['smk_match']
>>> table = depc['neighbs']
>>> inputs = table.rootmost_inputs
>>> print('inputs = %r' % (inputs,))
>>> import wbia.plottool as pt
>>> from wbia.plottool.interactions import ExpandableInteraction
>>> inter = ExpandableInteraction(nCols=1)
>>> inputs.show_exi_graph(inter=inter)
>>> # FIXME; Expanding inputs can overspecify inputs
>>> #inputs = inputs.expand_input(2)
>>> #print('inputs = %r' % (inputs,))
>>> #inputs.show_exi_graph(inter=inter)
>>> #inputs = inputs.expand_input(1)
>>> #inputs = inputs.expand_input(3)
>>> #inputs = inputs.expand_input(2)
>>> #inputs = inputs.expand_input(2)
>>> #inputs = inputs.expand_input(1)
>>> #print('inputs = %r' % (inputs,))
>>> #inputs.show_exi_graph(inter=inter)
>>> inter.start()
>>> ut.show_if_requested()
total_expand()[source]
wbia.dtool.input_helpers.get_rootmost_inputs(exi_graph, table)[source]
CommandLine:

python -m dtool.input_helpers get_rootmost_inputs --show

Parameters
  • exi_graph (nx.Graph) – made from make_expanded_input_graph(graph, target)

  • table (dtool.Table) –

CommandLine:

python -m dtool.input_helpers get_rootmost_inputs

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.input_helpers import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> depc = testdata_depc3()
>>> tablename = 'smk_match'
>>> table = depc[tablename]
>>> exi_graph = table.expanded_input_graph
>>> inputs_ = get_rootmost_inputs(exi_graph, table)
>>> print('inputs_ = %r' % (inputs_,))
>>> inputs = inputs_.expand_input(1)
>>> rmi = inputs.rmi_list[0]
>>> result = ('inputs = %s' % (inputs,)) + '\n'
>>> result += ('compute_edges = %s' % (ut.repr2(inputs.flat_compute_rmi_edges(), nl=1)))
>>> print(result)
wbia.dtool.input_helpers.make_expanded_input_graph(graph, target)[source]

Starting from the target property we trace all possible paths in the graph back to all sources.

Parameters
  • graph (nx.DiMultiGraph) – the dependency graph with a single source.

  • target (str) – a single target node in graph

Notes

Each edge in the graph must have a local_input_id that defines the type of edge it is (e.g. one-to-many, one-to-one, nwise/multi).

# Step 1: Extracting the Relevant Subgraph. We start by searching for all sources of the graph (we assume there is only one). Then we extract the subgraph defined by all edges between the sources and the target. We augment this graph with a dummy super source s and super sink t. This allows us to associate an edge with the real source and sink.

# Step 2: Trace all paths from s to t. Create a set of all paths from the source to the sink and accumulate the local_input_id of each edge along the path. This will uniquely identify each path. We use a hack to condense the accumulated ids in order to display them nicely.

# Step 3: Create the new exi_graph. Using the traced paths with ids we construct a new graph representing expanded inputs. The nodes in the original graph will be copied for each unique path that passes through the node. We identify these nodes using the accumulated ids built along the edges in our path set. For each path starting from the target we add each node augmented with the accumulated ids on its output(?) edge. We also add the edges along these paths, which results in the final exi_graph.

# Step 4: Identify valid input candidates. The purpose of this graph is to identify which inputs are needed to compute dependent properties. One valid set of inputs is all sources of the graph. However, sometimes it is preferable to specify a model that may have been trained from many inputs. Therefore any node with a one-to-many input edge may also be specified as an input.

# Step 5: Identify root-most inputs. The user will only specify one possible set of the inputs. We refer to this set as the "root-most" inputs. This is a set of candidate nodes such that all paths from the sink to the super source are blocked. We default to the set of inputs which results in the fewest dependency computations. However this is arbitrary.

The last step that is not represented here is to compute the order that the branches must be specified in when given to the depcache for a computation.

Returns

exi_graph: the expanded input graph

Return type

nx.DiGraph

Notes

All * nodes are defined to be distinct. TODO: To make a * node non-distinct it must be suffixed with an identifier.

CommandLine:

python -m dtool.input_helpers make_expanded_input_graph --show

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.input_helpers import *  # NOQA
>>> from wbia.dtool.example_depcache2 import * # NOQA
>>> depc = testdata_depc3()
>>> table = depc['smk_match']
>>> table = depc['vsone']
>>> graph = table.depc.explicit_graph.copy()
>>> target = table.tablename
>>> exi_graph = make_expanded_input_graph(graph, target)
>>> x = list(exi_graph.nodes())[0]
>>> print('x = %r' % (x,))
>>> # xdoctest: +REQUIRES(--show)
>>> ut.quit_if_noshow()
>>> import wbia.plottool as pt
>>> pt.show_nx(graph, fnum=1, pnum=(1, 2, 1))
>>> pt.show_nx(exi_graph, fnum=1, pnum=(1, 2, 2))
>>> ut.show_if_requested()
wbia.dtool.input_helpers.recolor_exi_graph(exi_graph, rootmost_nodes)[source]
wbia.dtool.input_helpers.sort_rmi_list(rmi_list)[source]
CommandLine:

python -m dtool.input_helpers sort_rmi_list

Example

>>> from wbia.dtool.input_helpers import *  # NOQA
>>> from wbia.dtool.example_depcache2 import *  # NOQA
>>> depc = testdata_custom_annot_depc([
...    dict(tablename='Notch_Tips', parents=['annot']),
...    dict(tablename='chips', parents=['annot']),
...    dict(tablename='Cropped_Chips', parents=['chips', 'Notch_Tips']),
... ])
>>> table = depc['Cropped_Chips']
>>> inputs = exi_inputs = table.rootmost_inputs
>>> compute_rmi_edges = exi_inputs.flat_compute_rmi_edges()
>>> input_rmis = compute_rmi_edges[-1][0]
>>> rmi_list = input_rmis[::-1]
>>> rmi_list = sort_rmi_list(rmi_list)
>>> assert rmi_list[0].node[0] == 'chips'

wbia.dtool.sql_control module

Interface into SQL for the IBEIS Controller

TODO: need to use some sort of sticky bit so sql files are created with reasonable permissions.

class wbia.dtool.sql_control.SQLColumnRichInfo(column_id, name, type_, notnull, dflt_value, pk)

Bases: tuple

column_id

Alias for field number 0

dflt_value

Alias for field number 4

name

Alias for field number 1

notnull

Alias for field number 3

pk

Alias for field number 5

type_

Alias for field number 2

class wbia.dtool.sql_control.SQLDatabaseController(uri, name, readonly=False, timeout=600)[source]

Bases: object

Interface to an SQL database

class Metadata(ctrlr)[source]

Bases: collections.abc.Mapping

Metadata is an attribute of the SQLDatabaseController that facilitates easy usage by internal and external users. Each metadata attribute represents a table (i.e. an instance of TableMetadata). Each TableMetadata instance has metadata names as attributes. The TableMetadata can also be adapted to a dictionary for compatibility.

The database attribute is a special case that results in a DatabaseMetadata instance rather than TableMetadata. This primarily gives access to the version and initial UUID, respectively as database.version and database.init_uuid.

Parameters

ctrlr (SQLDatabaseController) – parent controller object
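A hedged usage sketch based on the description above (it assumes the controller exposes this object as db.metadata and that tables are reachable by name; the table name is hypothetical):

db = SQLDatabaseController('sqlite:///', 'testing')
print(db.metadata.database.version)        # database-level metadata
print(db.metadata.database.init_uuid)
# table_meta = db.metadata['dummy_table']  # a TableMetadata instance (assumed access)
# print(dict(table_meta))                  # TableMetadata adapts to a dict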

class DatabaseMetadata(ctrlr)[source]

Bases: collections.abc.MutableMapping

Special metadata for database information

property init_uuid
property version
class TableMetadata(ctrlr, table_name)[source]

Bases: collections.abc.MutableMapping

Metadata on a particular SQL table

update(**kwargs)[source]

Update or insert the value into the metadata table with the given keyword arguments of metadata field names

add_cleanly(tblname, colnames, params_iter, get_rowid_from_superkey, superkey_paramx=(0,), **kwargs)[source]

ADDER. Extra input: the first item of params_iter must be a superkey (like a uuid).

Does not add None values. Does not add duplicate values. For each None input returns None output. For each duplicate input returns the existing rowid.

Parameters
  • tblname (str) – table name to add into

  • colnames (tuple of strs) – columns whose values are specified in params_iter

  • params_iter (iterable) – an iterable of tuples where each tuple corresponds to a row

  • get_rowid_from_superkey (func) – function that tests if a row needs to be added. It should return None for any new rows to be inserted. It should return the existing rowid if one exists

  • superkey_paramx (tuple of ints) – indices of tuples in params_iter which correspond to superkeys. defaults to (0,)

Returns

rowid_list_ – list of newly added or previously added rowids

Return type

iterable

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import *  # NOQA
>>> db = SQLDatabaseController('sqlite:///', 'testing')
>>> db.add_table('dummy_table', (
>>>     ('rowid',               'INTEGER PRIMARY KEY'),
>>>     ('key',                 'TEXT'),
>>>     ('superkey1',           'TEXT'),
>>>     ('superkey2',           'TEXT'),
>>>     ('val',                 'TEXT'),
>>> ),
>>>     superkeys=[('key',), ('superkey1', 'superkey2')],
>>>     docstr='')
>>> db.print_schema()
>>> tblname = 'dummy_table'
>>> colnames = ('key', 'val')
>>> params_iter = [('spam', 'eggs'), ('foo', 'bar')]
>>> # Find a usable superkey
>>> superkey_colnames = db.get_table_superkey_colnames(tblname)
>>> superkey_paramx = None
>>> for superkey in superkey_colnames:
>>>    if all(k in colnames for k in superkey):
>>>        superkey_paramx = [colnames.index(k) for k in superkey]
>>>        superkey_colnames = ut.take(colnames, superkey_paramx)
>>>        break
>>> def get_rowid_from_superkey(superkeys_list):
>>>     return db.get_where_eq(tblname, ('rowid',), zip(superkeys_list), superkey_colnames)
>>> rowid_list_ = db.add_cleanly(
>>>     tblname, colnames, params_iter, get_rowid_from_superkey, superkey_paramx)
>>> print(rowid_list_)
add_column(tablename, colname, coltype)[source]
add_table(tablename=None, coldef_list=None, **metadata_keyval)[source]
Parameters
  • tablename (str) –

  • coldef_list (list) –

  • constraint (list or None) –

  • docstr (str) –

  • superkeys (list or None) – list of tuples of column names which uniquely identifies a rowid

backup(backup_filepath)[source]

backup_filepath = dst_fpath

check_rowid_exists(tablename, rowid_iter, eager=True, **kwargs)[source]

Check for the existence of rows (rowid_iter) in a table (tablename). Returns a sequence of rowids that exist in the given sequence.

The 'rowid' term is an alias for the primary key. When calling this method, you should know that the primary key may be more than one column.
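An illustrative sketch (not from the original docstring), reusing the dummy_table created in the add_cleanly doctest above:

existing = db.check_rowid_exists('dummy_table', [1, 2, 999])
# 'existing' contains only those of [1, 2, 999] that are present in dummy_table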

connect()[source]

Create a connection instance to wrap a SQL execution block as a context manager

delete(tblname, id_list, id_colname='rowid', **kwargs)[source]

Deletes rows from a SQL table (tblname) by ID, given a sequence of IDs (id_list). Optionally a different ID column can be specified via id_colname.
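An illustrative sketch based on the signature (it reuses the dummy_table and key column from the add_cleanly doctest above):

db.delete('dummy_table', [1, 2])                      # delete rows by primary rowid
db.delete('dummy_table', ['spam'], id_colname='key')  # delete rows by another id column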

delete_rowids(tblname, rowid_list, **kwargs)[source]

Deletes the rows in rowid_list.

drop_all_tables()[source]

DELETES ALL INFO IN TABLE

drop_table(tablename, invalidate_cache=True)[source]
dump_schema()[source]

Convenience: Dumps all csv database files to disk. NOTE: This function is semi-obsolete because of the auto-generated current schema file. Use dump_schema_current_autogeneration instead for all purposes except for parsing out the database schema or for a concise visual representation.

dump_tables_to_csv(dump_dir=None)[source]

Convenience: Dumps all csv database files to disk

ensure_postgresql_types(conn)[source]

Create a connection instance to wrap a SQL execution block as a context manager

executemany(operation, params_iter, unpack_scalars=True, keepwrap=False, **kwargs)[source]

Executes the given operation once for each item in params_iter

Parameters
  • operation (str) – SQL operation

  • params_iter (sequence) – a sequence of sequences containing parameters in the sql operation

  • unpack_scalars (bool) – [deprecated] unpack a single result from each query; only use with operations that return a single result per query (default: True)
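
A hedged sketch (not from the source). Both the raw INSERT string and the qmark placeholder style are assumptions about what the underlying driver accepts:

>>> # Sketch only; the placeholder style is an assumption
>>> op = 'INSERT INTO dummy_table (key, val) VALUES (?, ?)'
>>> db.executemany(op, [('a', '1'), ('b', '2')])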

executeone(operation, params=(), eager=True, verbose=False, use_fetchone_behavior=False, keepwrap=False)[source]

Executes the given operation once with the given set of params

Parameters
  • operation (str|TextClause) – SQL statement

  • params (sequence|dict) – parameters to pass in with SQL execution

  • eager – [deprecated] no-op

  • verbose – [deprecated] no-op

  • use_fetchone_behavior (bool) – Use DBAPI fetchone behavior when outputting no rows (i.e. None)
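
A hedged sketch (not from the source). The named bind-parameter style is an assumption based on the str|TextClause operation type:

>>> # Sketch only; the ':key' bind style is an assumption
>>> rows = db.executeone('SELECT val FROM dummy_table WHERE key = :key', {'key': 'spam'})
>>> print(rows)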

exists_where_eq(tblname, params_iter, where_colnames, op='AND', unpack_scalars=True, eager=True, **kwargs)[source]

Hacked-in function for nicer templates

get(tblname, colnames, id_iter=None, id_colname='rowid', eager=True, assume_unique=False, batch_size=10000, **kwargs)[source]

Get rows of data by ID

Parameters
  • tblname (str) – table name to get from

  • colnames (tuple of str) – column names to grab from

  • id_iter (iterable) – iterable of search keys

  • id_colname (str) – column to be used as the search key (default: rowid)

  • eager (bool) – use eager evaluation

  • assume_unique (bool) – default False. Experimental feature that could result in a 10x speedup

  • unpack_scalars (bool) – default True

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> depc.clear_all()
>>> rowids = depc.get_rowids('notch', [1, 2, 3])
>>> table = depc['notch']
>>> db = table.db
>>> table.print_csv()
>>> # Break things to test set
>>> colnames = ('dummy_annot_rowid',)
>>> got_data = db.get('notch', colnames, id_iter=rowids)
>>> assert got_data == [1, 2, 3]
get_all_col_rows(tblname, colname)[source]

returns a list of all rowids from a table in ascending order

get_all_rowids(tblname, **kwargs)[source]

returns a list of all rowids from a table in ascending order

get_all_rowids_where(tblname, where_clause, params, **kwargs)[source]

returns a list of rowids from a table in ascending order satisfying a condition

get_coldef_list(tablename)[source]
Returns

each tuple is (col_name, col_type)

Return type

list of (str, str)

get_column(tablename, name)[source]

Get all the values for the specified column (name) of the table (tablename)

get_column_names(tablename)[source]

Convenience: Returns the SQL table's column names

get_columns(tablename)[source]
Parameters

tablename (str) – table name

Returns

list of tuples with format:
(
    column_id : id of the column
    name : the name of the column
    type_ : the type of the column
    notnull : 0 or 1 if the column can contain null values
    dflt_value : the default value
    pk : 0 or 1 if the column participates in the primary key
)

Return type

column_list

References

http://stackoverflow.com/questions/17717829/how-to-get-column-names-from-a-table-in-sqlite-via-pragma-net-c http://stackoverflow.com/questions/1601151/how-do-i-check-in-sqlite-whether-a-table-exists

CommandLine:

python -m dtool.sql_control --exec-get_columns
python -m dtool.sql_control --exec-get_columns --tablename=contributors
python -m dtool.sql_control --exec-get_columns --tablename=nonexist

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> tablename = 'keypoint'
>>> db = depc[tablename].db
>>> colrichinfo_list = db.get_columns(tablename)
>>> result = ('colrichinfo_list = %s' % (ut.repr2(colrichinfo_list, nl=1),))
>>> print(result)
colrichinfo_list = [
    (0, 'keypoint_rowid', 'INTEGER', 0, None, 1),
    (1, 'chip_rowid', 'INTEGER', 1, None, 0),
    (2, 'config_rowid', 'INTEGER', 0, '0', 0),
    (3, 'kpts', 'NDARRAY', 0, None, 0),
    (4, 'num', 'INTEGER', 0, None, 0),
]
get_db_init_uuid(ensure=True)[source]

Get the database initialization (creation) UUID

CommandLine:

python -m dtool.sql_control get_db_init_uuid

Example

>>> # ENABLE_DOCTEST
>>> import uuid
>>> import os
>>> from wbia.dtool.sql_control import *  # NOQA
>>> # Check random database gets new UUID on init
>>> db = SQLDatabaseController('sqlite:///', 'testing')
>>> uuid_ = db.get_db_init_uuid()
>>> print('New Database: %r is valid' % (uuid_, ))
>>> assert isinstance(uuid_, uuid.UUID)
>>> # Check existing database keeps UUID
>>> sqldb_dpath = ut.ensure_app_resource_dir('dtool')
>>> sqldb_fname = u'test_database.sqlite3'
>>> path = os.path.join(sqldb_dpath, sqldb_fname)
>>> db_uri = 'sqlite:///{}'.format(os.path.realpath(path))
>>> db1 = SQLDatabaseController(db_uri, 'db1')
>>> uuid_1 = db1.get_db_init_uuid()
>>> db2 = SQLDatabaseController(db_uri, 'db2')
>>> uuid_2 = db2.get_db_init_uuid()
>>> print('Existing Database: %r == %r' % (uuid_1, uuid_2, ))
>>> assert uuid_1 == uuid_2
get_db_version(ensure=True)[source]
get_metadata_items()[source]
Returns

metadata_items

Return type

list

CommandLine:

python -m dtool.sql_control --exec-get_metadata_items

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.example_depcache import testdata_depc
>>> from wbia.dtool.sql_control import *  # NOQA
>>> db = testdata_depc()['notch'].db
>>> metadata_items = db.get_metadata_items()
>>> result = ('metadata_items = %s' % (ut.repr2(sorted(metadata_items)),))
>>> print(result)
get_metadata_val(key, eval_=False, default=None)[source]

The returned val is the repr string unless eval_ is True
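
A hedged sketch (not from the source); the '<tablename>_<field>' key format is an assumption:

>>> # Sketch only; the key name is an assumption
>>> print(db.get_metadata_val('dummy_table_docstr', eval_=False, default=None))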

get_row_count(tblname)[source]
get_rowid_from_superkey(tblname, params_iter=None, superkey_colnames=None, **kwargs)[source]

Getter that uses the constrained superkeys instead of rowids
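
A hedged sketch (not from the source), reusing the dummy_table from the add_cleanly example; it assumes each item of params_iter holds the superkey values for one row:

>>> # Sketch only
>>> rowids = db.get_rowid_from_superkey(
...     'dummy_table', params_iter=[('spam',), ('foo',)], superkey_colnames=('key',))
>>> print(rowids)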

get_schema_current_autogeneration_str(autogen_cmd='')[source]

Convenience: Autogenerates the most up-to-date database schema

CommandLine:

python -m dtool.sql_control --exec-get_schema_current_autogeneration_str

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> tablename = 'keypoint'
>>> db = depc[tablename].db
>>> result = db.get_schema_current_autogeneration_str('')
>>> print(result)
get_sql_version()[source]

Convenience

get_table_as_pandas(tablename, rowids=None, columns=None, exclude_columns=[])[source]

aid = 30
db = ibs.staging
rowids = ut.flatten(ibs.get_review_rowids_from_single([aid]))
tablename = 'reviews'
exclude_columns = 'review_user_confidence review_user_identity'.split(' ')
logger.info(db.get_table_as_pandas(tablename, rowids, exclude_columns=exclude_columns))

db = ibs.db
rowids = ut.flatten(ibs.get_annotmatch_rowids_from_aid([aid]))
tablename = 'annotmatch'
exclude_columns = 'annotmatch_confidence annotmatch_posixtime_modified annotmatch_reviewer'.split(' ')
logger.info(db.get_table_as_pandas(tablename, rowids, exclude_columns=exclude_columns))

get_table_autogen_dict(tablename)[source]
Parameters

tablename (str) –

Returns

autogen_dict

Return type

dict

CommandLine:

python -m dtool.sql_control get_table_autogen_dict

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import *  # NOQA
>>> db = SQLDatabaseController('sqlite:///', 'testing')
>>> tablename = 'dummy_table'
>>> db.add_table(tablename, (
>>>     ('rowid', 'INTEGER PRIMARY KEY'),
>>>     ('value1', 'TEXT'),
>>>     ('value2', 'TEXT NOT NULL'),
>>>     ('value3', 'TEXT DEFAULT 1'),
>>>     ('time_added', "INTEGER DEFAULT (CAST(STRFTIME('%s', 'NOW', 'UTC') AS INTEGER))")
>>> ))
>>> autogen_dict = db.get_table_autogen_dict(tablename)
>>> result = ut.repr2(autogen_dict, nl=2)
>>> print(result)
get_table_autogen_str(tablename)[source]
Parameters

tablename (str) –

Returns

quoted_docstr

Return type

str

CommandLine:

python -m dtool.sql_control get_table_autogen_str

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import *  # NOQA
>>> db = SQLDatabaseController('sqlite:///', 'testing')
>>> tablename = 'dummy_table'
>>> db.add_table(tablename, (
>>>     ('rowid', 'INTEGER PRIMARY KEY'),
>>>     ('value', 'TEXT'),
>>>     ('time_added', "INTEGER DEFAULT (CAST(STRFTIME('%s', 'NOW', 'UTC') AS INTEGER))")
>>> ))
>>> result = '\n'.join(db.get_table_autogen_str(tablename))
>>> print(result)
get_table_column_data(tablename, columns=None, exclude_columns=[], rowids=None)[source]

Grabs a table of information

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> tablename = 'keypoint'
>>> db = depc[tablename].db
>>> column_list, column_names = db.get_table_column_data(tablename)
>>> column_list
[[], [], [], [], []]
>>> column_names
['keypoint_rowid', 'chip_rowid', 'config_rowid', 'kpts', 'num']
get_table_constraints(tablename)[source]

TODO: use coldef_list with table_autogen_dict instead

get_table_csv(tablename, exclude_columns=[], rowids=None, truncate=False)[source]

Converts a table to CSV format

Parameters
  • tablename (str) –

  • exclude_columns (list) –

Returns

csv_table

Return type

str

CommandLine:

python -m dtool.sql_control --test-get_table_csv
python -m dtool.sql_control --exec-get_table_csv --tablename=contributors

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> depc.clear_all()
>>> rowids = depc.get_rowids('notch', [1, 2, 3])
>>> table = depc['notch']
>>> db = table.db
>>> ut.exec_funckw(db.get_table_csv, globals())
>>> tablename = 'notch'
>>> csv_table = db.get_table_csv(tablename, exclude_columns, truncate=True)
>>> print(csv_table)
get_table_csv_header(tablename)[source]
get_table_docstr(tablename)[source]
CommandLine:

python -m dtool.sql_control --exec-get_table_docstr

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> tablename = 'keypoint'
>>> db = depc[tablename].db
>>> result = db.get_table_docstr(tablename)
>>> print(result)
Used to store individual chip features (ellipses)
get_table_names(lazy=False)[source]

Convenience:

get_table_new_transferdata(tablename, exclude_columns=[])[source]
CommandLine:

python -m dtool.sql_control --test-get_table_column_data
python -m dtool.sql_control --test-get_table_new_transferdata
python -m dtool.sql_control --test-get_table_new_transferdata:1

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> tablename = 'keypoint'
>>> db = depc[tablename].db
>>> tablename_list = db.get_table_names()
>>> colrichinfo_list = db.get_columns(tablename)
>>> for tablename in tablename_list:
...     new_transferdata = db.get_table_new_transferdata(tablename)
...     column_list, column_names, extern_colx_list, extern_superkey_colname_list, extern_superkey_colval_list, extern_tablename_list, extern_primarycolnames_list = new_transferdata
...     print('tablename = %r' % (tablename,))
...     print('colnames = ' + ut.repr2(column_names))
...     print('extern_colx_list = ' + ut.repr2(extern_colx_list))
...     print('extern_superkey_colname_list = ' + ut.repr2(extern_superkey_colname_list))
...     print('L___')

Example

>>> # SLOW_DOCTEST
>>> # xdoctest: +REQUIRES(module:wbia)
>>> from wbia.dtool.sql_control import *  # NOQA
>>> import wbia
>>> ibs = wbia.opendb('testdb1')
>>> db = ibs.db
>>> exclude_columns = []
>>> tablename_list = ibs.db.get_table_names()
>>> for tablename in tablename_list:
...     new_transferdata = db.get_table_new_transferdata(tablename)
...     column_list, column_names, extern_colx_list, extern_superkey_colname_list, extern_superkey_colval_list, extern_tablename_list, extern_primarycolnames_list = new_transferdata
...     print('tablename = %r' % (tablename,))
...     print('colnames = ' + ut.repr2(column_names))
...     print('extern_colx_list = ' + ut.repr2(extern_colx_list))
...     print('extern_superkey_colname_list = ' + ut.repr2(extern_superkey_colname_list))
...     print('L___')

Example

>>> # SLOW_DOCTEST
>>> # xdoctest: +REQUIRES(module:wbia)
>>> from wbia.dtool.sql_control import *  # NOQA
>>> import wbia
>>> ibs = wbia.opendb('testdb1')
>>> db = ibs.db
>>> exclude_columns = []
>>> tablename = ibs.const.IMAGE_TABLE
>>> new_transferdata = db.get_table_new_transferdata(tablename)
>>> column_list, column_names, extern_colx_list, extern_superkey_colname_list, extern_superkey_colval_list, extern_tablename_list, extern_primarycolnames_list = new_transferdata
>>> dependsmap = db.metadata[tablename].dependsmap
>>> print('tablename = %r' % (tablename,))
>>> print('colnames = ' + ut.repr2(column_names))
>>> print('extern_colx_list = ' + ut.repr2(extern_colx_list))
>>> print('extern_superkey_colname_list = ' + ut.repr2(extern_superkey_colname_list))
>>> print('dependsmap = %s' % (ut.repr2(dependsmap, nl=True),))
>>> print('L___')
>>> tablename = ibs.const.ANNOTATION_TABLE
>>> new_transferdata = db.get_table_new_transferdata(tablename)
>>> column_list, column_names, extern_colx_list, extern_superkey_colname_list, extern_superkey_colval_list, extern_tablename_list, extern_primarycolnames_list = new_transferdata
>>> dependsmap = db.metadata[tablename].dependsmap
>>> print('tablename = %r' % (tablename,))
>>> print('colnames = ' + ut.repr2(column_names))
>>> print('extern_colx_list = ' + ut.repr2(extern_colx_list))
>>> print('extern_superkey_colname_list = ' + ut.repr2(extern_superkey_colname_list))
>>> print('dependsmap = %s' % (ut.repr2(dependsmap, nl=True),))
>>> print('L___')
get_table_primarykey_colnames(tablename)[source]
get_table_superkey_colnames(tablename)[source]

Actually returns a list of tuples; the name should be changed to get_table_superkey_colnames_list.

Parameters

tablename (str) –

Returns

superkeys

Return type

list

CommandLine:

python -m dtool.sql_control --test-get_table_superkey_colnames
python -m wbia --tf get_table_superkey_colnames --tablename=contributors
python -m wbia --tf get_table_superkey_colnames --db PZ_Master0 --tablename=annotations
python -m wbia --tf get_table_superkey_colnames --db PZ_Master0 --tablename=contributors # NOQA

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> db = depc['chip'].db
>>> superkeys = db.get_table_superkey_colnames('chip')
>>> result = ut.repr2(superkeys, nl=False)
>>> print(result)
[('dummy_annot_rowid', 'config_rowid')]
get_where(tblname, colnames, params_iter, where_clause, unpack_scalars=True, eager=True, **kwargs)[source]

Interface to do a SQL select with a where clause

Parameters
  • tblname (str) – table name

  • colnames (tuple[str]) – sequence of column names

  • params_iter (list[dict]) – a sequence of dicts with parameters, where each item in the sequence is used in a SQL execution

  • where_clause (str|Operation) – conditional statement used in the where clause

  • unpack_scalars (bool) – [deprecated] unpack a single result from each query; only use with operations that return a single result per query (default: True)
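
A hedged sketch (not from the source). Named bind parameters are an assumption implied by the list-of-dicts params_iter:

>>> # Sketch only; the ':key' bind style is an assumption
>>> vals = db.get_where('dummy_table', ('val',), [{'key': 'spam'}], 'key = :key')
>>> print(vals)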

get_where_eq(tblname, colnames, params_iter, where_colnames, unpack_scalars=True, op='AND', batch_size=10000, **kwargs)[source]

Executes a SQL select where the given parameters match/equal the specified where columns.

Parameters
  • tblname (str) – table name

  • colnames (tuple[str]) – sequence of column names

  • params_iter (list[list]) – a sequence of a sequence with parameters, where each item in the sequence is used in a SQL execution

  • where_colnames (list[str]) – column names to match for equality against the same index of the param_iter values

  • op (str) – SQL boolean operator (e.g. AND, OR)

  • unpack_scalars (bool) – [deprecated] unpack a single result from each query; only use with operations that return a single result per query (default: True)
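
A minimal sketch (not from the source), following the call pattern used in the add_cleanly example above with its dummy_table:

>>> # Sketch only
>>> vals = db.get_where_eq('dummy_table', ('val',), [('spam',), ('foo',)], ('key',))
>>> print(vals)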

get_where_eq_set(tblname, colnames, params_iter, where_colnames, unpack_scalars=True, eager=True, op='AND', **kwargs)[source]
has_table(tablename, colnames=None, lazy=True)[source]

checks if a table exists
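
A minimal sketch (not from the source); it assumes dummy_table was created earlier in the session:

>>> # Sketch only
>>> print(db.has_table('dummy_table'))    # expected: True (assumed)
>>> print(db.has_table('no_such_table'))  # expected: False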

integrity()[source]
invalidate_tables_cache()[source]

Invalidates the controller's cache of table names and objects. Resets the caches and/or repopulates them.

property is_using_postgres
property is_using_sqlite
make_json_table_definition(tablename)[source]

VERY HACKY FUNC RIGHT NOW. NEED TO FIX LATER

Parameters

tablename

Returns

new_transferdata

Return type

?

CommandLine:

python -m wbia --tf sql_control.make_json_table_definition

CommandLine:

python -m utool --tf iter_module_doctestable --modname=dtool.sql_control --include_inherited=True
python -m dtool.sql_control --exec-make_json_table_definition

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.sql_control import *  # NOQA
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> tablename = 'keypoint'
>>> db = depc[tablename].db
>>> table_def = db.make_json_table_definition(tablename)
>>> result = ('table_def = %s' % (ut.repr2(table_def, nl=True),))
>>> print(result)
table_def = {
    'keypoint_rowid': 'INTEGER',
    'chip_rowid': 'INTEGER',
    'config_rowid': 'INTEGER',
    'kpts': 'NDARRAY',
    'num': 'INTEGER',
}
merge_databases_new(db_src, ignore_tables=None, rowid_subsets=None)[source]

Copies all non-rowid properties into another SQL table. Handles annotated dependencies. Does not handle external files. Could handle dependency tree order, but this is not yet implemented.

FINISHME

Parameters

db_src (SQLController) – merge data from db_src into db

CommandLine:

python -m dtool.sql_control --test-merge_databases_new:0
python -m dtool.sql_control --test-merge_databases_new:2

Example

>>> # DISABLE_DOCTEST
>>> # xdoctest: +REQUIRES(module:wbia)
>>> from wbia.dtool.sql_control import *  # NOQA
>>> import wbia
>>> #ibs_dst = wbia.opendb(dbdir='testdb_dst')
>>> ibs_src = wbia.opendb(db='testdb1')
>>> # OPEN A CLEAN DATABASE
>>> ibs_dst = wbia.opendb(dbdir='test_sql_merge_dst1', allow_newdir=True, delete_ibsdir=True)
>>> ibs_src.ensure_contributor_rowids()
>>> # build test data
>>> db = ibs_dst.db
>>> db_src = ibs_src.db
>>> rowid_subsets = None
>>> # execute function
>>> db.merge_databases_new(db_src)

Example

>>> # DISABLE_DOCTEST
>>> # xdoctest: +REQUIRES(module:wbia)
>>> from wbia.dtool.sql_control import *  # NOQA
>>> import wbia
>>> ibs_src = wbia.opendb(db='testdb2')
>>> # OPEN A CLEAN DATABASE
>>> ibs_dst = wbia.opendb(dbdir='test_sql_merge_dst2', allow_newdir=True, delete_ibsdir=True)
>>> ibs_src.ensure_contributor_rowids()
>>> # build test data
>>> db = ibs_dst.db
>>> db_src = ibs_src.db
>>> ignore_tables = ['lblannot', 'lblimage', 'image_lblimage_relationship', 'annotation_lblannot_relationship', 'keys']
>>> rowid_subsets = None
>>> # execute function
>>> db.merge_databases_new(db_src, ignore_tables=ignore_tables)

Example

>>> # DISABLE_DOCTEST
>>> # xdoctest: +REQUIRES(module:wbia)
>>> from wbia.dtool.sql_control import *  # NOQA
>>> import wbia
>>> ibs_src = wbia.opendb(db='testdb2')
>>> # OPEN A CLEAN DATABASE
>>> ibs_src.fix_invalid_annotmatches()
>>> ibs_dst = wbia.opendb(dbdir='test_sql_subexport_dst2', allow_newdir=True, delete_ibsdir=True)
>>> ibs_src.ensure_contributor_rowids()
>>> # build test data
>>> db = ibs_dst.db
>>> db_src = ibs_src.db
>>> ignore_tables = ['lblannot', 'lblimage', 'image_lblimage_relationship', 'annotation_lblannot_relationship', 'keys']
>>> # execute function
>>> aid_subset = [1, 2, 3]
>>> rowid_subsets = {ANNOTATION_TABLE: aid_subset,
...                  NAME_TABLE: ibs_src.get_annot_nids(aid_subset),
...                  IMAGE_TABLE: ibs_src.get_annot_gids(aid_subset),
...                  ANNOTMATCH_TABLE: [],
...                  GSG_RELATION_TABLE: [],
...                  }
>>> db.merge_databases_new(db_src, ignore_tables=ignore_tables, rowid_subsets=rowid_subsets)
modify_table(tablename=None, colmap_list=None, tablename_new=None, drop_columns=[], add_columns=[], rename_columns=[], **metadata_keyval)[source]

Function to modify the schema; only columns that are being added, removed, or changed need to be enumerated.

Parameters
  • tablename (str) – tablename

  • colmap_list (list) – list of tuples (orig_colname, new_colname, new_coltype, convert_func):
    orig_colname – the original name of the column; None to append, int for index
    new_colname – the new column name ('' for same, None to delete)
    new_coltype – the new column type; None to use data unmodified
    convert_func – function to convert data from old to new

  • constraint (str) –

  • superkeys (list) –

  • docstr (str) –

  • tablename_new

Example

>>> # DISABLE_DOCTEST
>>> def loc_zip_map(x):
...     return x
>>> db.modify_table(const.CONTRIBUTOR_TABLE, (
>>>         # orig_colname,             new_colname,      new_coltype, convert_func
>>>         # a non-needed, but correct mapping (identity function)
>>>         ('contrib_rowid',      '',                    '',               None),
>>>         # for new columns, function is ignored (TYPE CANNOT BE EMPTY IF ADDING)
>>>         (None,                 'contrib_loc_address', 'TEXT',           None),
>>>         # adding a new column at index 4 (if index is invalid, None is used)
>>>         (4,                    'contrib_loc_address', 'TEXT',           None),
>>>         # for deleted columns, type and function are ignored
>>>         ('contrib_loc_city',    None,                 '',               None),
>>>         # for renamed columns, type and function are ignored
>>>         ('contrib_loc_city',   'contrib_loc_town',    '',       None),
>>>         ('contrib_loc_zip',    'contrib_loc_zip',     'TEXT',   loc_zip_map),
>>>         # type not changing, only NOT NULL provision
>>>         ('contrib_loc_country', '',                   'TEXT NOT NULL',  None),
>>>     ),
>>>     superkeys=[('contributor_rowid',)],
>>>     constraint=[],
>>>     docstr='Used to store the contributors to the project'
>>> )
optimize()[source]
print_dbg_schema()[source]
print_schema()[source]
print_table_csv(tablename, exclude_columns=[], truncate=False)[source]
reboot()[source]
rename_table(tablename_old, tablename_new, invalidate_cache=True)[source]
rows_exist(tblname, rowids)[source]

Checks if rowids exist. Yields True for each rowid that does.
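
A hedged sketch (not from the source); the generator is assumed to yield one boolean per input rowid:

>>> # Sketch only; the exact output is an assumption
>>> print(list(db.rows_exist('dummy_table', [1, 9999])))  # e.g. [True, False]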

rrr(verbose=True, reload_module=True)

special class reloading function This function is often injected as rrr of classes

property schema_name

The name of the namespace schema (used with Postgres).

set(tblname, colnames, val_iter, id_iter, id_colname='rowid', duplicate_behavior='error', duplcate_auto_resolve=True, **kwargs)[source]

setter

CommandLine:

python -m dtool.sql_control set

Example

>>> # ENABLE_DOCTEST
>>> from wbia.dtool.example_depcache import testdata_depc
>>> depc = testdata_depc()
>>> depc.clear_all()
>>> rowids = depc.get_rowids('notch', [1, 2, 3])
>>> table = depc['notch']
>>> db = table.db
>>> table.print_csv()
>>> # Break things to test set
>>> colnames = ('dummy_annot_rowid',)
>>> val_iter = [(9003,), (9001,), (9002,)]
>>> orig_data = db.get('notch', colnames, id_iter=rowids)
>>> db.set('notch', colnames, val_iter, id_iter=rowids)
>>> new_data = db.get('notch', colnames, id_iter=rowids)
>>> assert new_data == [x[0] for x in val_iter]
>>> assert new_data != orig_data
>>> table.print_csv()
>>> depc.clear_all()
set_db_version(version)[source]
set_metadata_val(key, val)[source]

key must be given as a repr-ed string

shrink_memory()[source]
squeeze()[source]
property tablenames
vacuum()[source]
view_db_in_external_reader()[source]
class wbia.dtool.sql_control.SQLTable(db, name)[source]

Bases: utool.util_dev.NiceRepr

Convenience object for dealing with a specific table

table = db
table = SQLTable(db, 'annotmatch')

as_pandas(rowids=None, columns=None)[source]
clear()[source]
delete(rowids)[source]
get(colnames, id_iter, id_colname='rowid', eager=True)[source]
number_of_rows()[source]
rowids()[source]
rrr(verbose=True, reload_module=True)

special class reloading function This function is often injected as rrr of classes
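
A hedged usage sketch for SQLTable (not from the source); it assumes an existing controller db with a 'dummy_table' and that pandas is installed for as_pandas:

>>> # Sketch only
>>> table = SQLTable(db, 'dummy_table')
>>> print(table.number_of_rows())
>>> print(table.rowids())
>>> df = table.as_pandas()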

wbia.dtool.sql_control.compare_coldef_lists(coldef_list1, coldef_list2)[source]
wbia.dtool.sql_control.create_engine(uri, POSTGRESQL_POOL_SIZE=20, ENGINES={}, timeout=600)[source]
wbia.dtool.sql_control.sanitize_sql(db, tablename_, columns=None)[source]

Sanitizes an SQL tablename and column. Use sparingly.

wbia.dtool.sql_control.tuplize(list_)[source]

Converts each scalar item in a list to a dimension-1 tuple
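
A minimal sketch of the expected behavior (an assumption, not taken from a doctest):

>>> from wbia.dtool.sql_control import tuplize
>>> print(tuplize([1, 2, 3]))  # expected: [(1,), (2,), (3,)] (assumed)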

Module contents