From af65146ef69fcde10d53b2ee12f82a9c28361e23 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 22 Oct 2019 23:50:31 -0700 Subject: Preserve comments for Spack YAML objects (#11602) This updates the configuration loading/dumping logic (now called load_config/dump_config) in spack_yaml to preserve comments (by using ruamel.yaml's RoundTripLoader). This has two effects: * environment spack.yaml files expect to retain comments, which load_config now supports. By using load_config, users can now use the ':' override syntax that was previously unavailable for environment configs (but was available for other config files). * config files now retain user comments by default (although in cases where Spack updates/overwrites config, the comments can still be removed). Details: * Subclasses `RoundTripLoader`/`RoundTripDumper` to parse yaml into ruamel's `CommentedMap` and analogous data structures * Applies filename info directly to ruamel objects in cases where the updated loader returns those * Copies management of sections in `SingleFileScope` from #10651 to allow overrides to occur * Updates the loader/dumper to handle the processing of overrides by specifically checking for the `:` character * Possibly the most controversial aspect, but without that, the parsed objects have to be reconstructed (i.e. as was done in `mark_overrides`). It is possible that `mark_overrides` could remain and a deep copy will not cause problems, but IMO that's generally worth avoiding. * This is also possibly controversial because Spack YAML strings can include `:`. My reckoning is that this only occurs for version specifications, so it is safe to check for `endswith(':') and not ('@' in string)` * As a consequence, this PR ends up reserving spack yaml functions load_config/dump_config exclusively for the purpose of storing spack config --- lib/spack/spack/binary_distribution.py | 6 +- lib/spack/spack/cmd/release_jobs.py | 2 +- lib/spack/spack/config.py | 79 +++++------ lib/spack/spack/environment.py | 7 +- lib/spack/spack/filesystem_view.py | 2 +- lib/spack/spack/hooks/yaml_version_check.py | 2 +- lib/spack/spack/modules/common.py | 2 +- lib/spack/spack/spec.py | 11 +- lib/spack/spack/test/cmd/release_jobs.py | 5 +- lib/spack/spack/test/concretize_preferences.py | 10 +- lib/spack/spack/test/config.py | 39 +++++- lib/spack/spack/test/spack_yaml.py | 2 +- lib/spack/spack/util/spack_yaml.py | 182 +++++++++++-------------- 13 files changed, 171 insertions(+), 178 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index aab07b8a84..eafa1dcdb1 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -11,6 +11,7 @@ import shutil import tempfile import hashlib from contextlib import closing +import ruamel.yaml as yaml import json @@ -139,7 +140,7 @@ def read_buildinfo_file(prefix): filename = buildinfo_file_name(prefix) with open(filename, 'r') as inputfile: content = inputfile.read() - buildinfo = syaml.load(content) + buildinfo = yaml.load(content) return buildinfo @@ -380,10 +381,9 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False, checksum = checksum_tarball(tarfile_path) # add sha256 checksum to spec.yaml - spec_dict = {} with open(spec_file, 'r') as inputfile: content = inputfile.read() - spec_dict = syaml.load(content) + spec_dict = yaml.load(content) bchecksum = {} bchecksum['hash_algorithm'] = 'sha256' bchecksum['hash'] = checksum diff --git 
a/lib/spack/spack/cmd/release_jobs.py b/lib/spack/spack/cmd/release_jobs.py index 65ba42053d..c5acc4ee2d 100644 --- a/lib/spack/spack/cmd/release_jobs.py +++ b/lib/spack/spack/cmd/release_jobs.py @@ -655,4 +655,4 @@ def release_jobs(parser, args): output_object['stages'] = stage_names with open(args.output_file, 'w') as outf: - outf.write(syaml.dump(output_object)) + outf.write(syaml.dump_config(output_object, default_flow_style=True)) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 1af8ad757b..3f536f3793 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -36,7 +36,6 @@ import re import sys import multiprocessing from contextlib import contextmanager -from six import string_types from six import iteritems from ordereddict_backport import OrderedDict @@ -155,7 +154,7 @@ class ConfigScope(object): mkdirp(self.path) with open(filename, 'w') as f: validate(data, section_schemas[section]) - syaml.dump(data, stream=f, default_flow_style=False) + syaml.dump_config(data, stream=f, default_flow_style=False) except (yaml.YAMLError, IOError) as e: raise ConfigFileError( "Error writing to config file: '%s'" % str(e)) @@ -200,6 +199,22 @@ class SingleFileScope(ConfigScope): # ... data ... # }, # } + # + # To preserve overrides up to the section level (e.g. to override + # the "packages" section with the "::" syntax), data in self.sections + # looks like this: + # { + # 'config': { + # 'config': { + # ... data ... + # } + # }, + # 'packages': { + # 'packages': { + # ... data ... + # } + # } + # } if self._raw_data is None: self._raw_data = _read_config_file(self.path, self.schema) if self._raw_data is None: @@ -215,29 +230,10 @@ class SingleFileScope(ConfigScope): self._raw_data = self._raw_data[key] - # data in self.sections looks (awkwardly) like this: - # { - # 'config': { - # 'config': { - # ... data ... - # } - # }, - # 'packages': { - # 'packages': { - # ... data ... - # } - # } - # } - # - # UNLESS there is no section, in which case it is stored as: - # { - # 'config': None, - # ... 
- # } - value = self._raw_data.get(section) - self.sections.setdefault( - section, None if value is None else {section: value}) - return self.sections[section] + for section_key, data in self._raw_data.items(): + self.sections[section_key] = {section_key: data} + + return self.sections.get(section, None) def write_section(self, section): validate(self.sections, self.schema) @@ -247,7 +243,8 @@ class SingleFileScope(ConfigScope): tmp = os.path.join(parent, '.%s.tmp' % self.path) with open(tmp, 'w') as f: - syaml.dump(self.sections, stream=f, default_flow_style=False) + syaml.dump_config(self.sections, stream=f, + default_flow_style=False) os.path.move(tmp, self.path) except (yaml.YAMLError, IOError) as e: raise ConfigFileError( @@ -533,7 +530,7 @@ class Configuration(object): try: data = syaml.syaml_dict() data[section] = self.get_config(section) - syaml.dump( + syaml.dump_config( data, stream=sys.stdout, default_flow_style=False, blame=blame) except (yaml.YAMLError, IOError): raise ConfigError("Error reading configuration: %s" % section) @@ -708,7 +705,7 @@ def _read_config_file(filename, schema): try: tty.debug("Reading config file %s" % filename) with open(filename) as f: - data = _mark_overrides(syaml.load(f)) + data = syaml.load_config(f) if data: validate(data, schema) @@ -734,23 +731,6 @@ def _override(string): return hasattr(string, 'override') and string.override -def _mark_overrides(data): - if isinstance(data, list): - return syaml.syaml_list(_mark_overrides(elt) for elt in data) - - elif isinstance(data, dict): - marked = syaml.syaml_dict() - for key, val in iteritems(data): - if isinstance(key, string_types) and key.endswith(':'): - key = syaml.syaml_str(key[:-1]) - key.override = True - marked[key] = _mark_overrides(val) - return marked - - else: - return data - - def _mark_internal(data, name): """Add a simple name mark to raw YAML/JSON data. @@ -820,9 +800,14 @@ def _merge_yaml(dest, source): # ensure that keys are marked in the destination. the key_marks dict # ensures we can get the actual source key objects from dest keys - for dk in dest.keys(): - if dk in key_marks: + for dk in list(dest.keys()): + if dk in key_marks and syaml.markable(dk): syaml.mark(dk, key_marks[dk]) + elif dk in key_marks: + # The destination key may not be markable if it was derived + # from a schema default. In this case replace the key. 
+ val = dest.pop(dk) + dest[key_marks[dk]] = val return dest diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 5204f7d288..b6b1814819 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -11,7 +11,6 @@ import shutil import copy import socket -import ruamel.yaml import six from ordereddict_backport import OrderedDict @@ -27,6 +26,7 @@ import spack.repo import spack.schema.env import spack.spec import spack.util.spack_json as sjson +import spack.util.spack_yaml as syaml import spack.config import spack.build_environment as build_env @@ -401,7 +401,7 @@ def validate(data, filename=None): def _read_yaml(str_or_file): """Read YAML from a file for round-trip parsing.""" - data = ruamel.yaml.load(str_or_file, ruamel.yaml.RoundTripLoader) + data = syaml.load_config(str_or_file) filename = getattr(str_or_file, 'name', None) validate(data, filename) return data @@ -411,8 +411,7 @@ def _write_yaml(data, str_or_file): """Write YAML to a file preserving comments and dict order.""" filename = getattr(str_or_file, 'name', None) validate(data, filename) - ruamel.yaml.dump(data, str_or_file, Dumper=ruamel.yaml.RoundTripDumper, - default_flow_style=False) + syaml.dump_config(data, str_or_file, default_flow_style=False) def _eval_conditional(string): diff --git a/lib/spack/spack/filesystem_view.py b/lib/spack/spack/filesystem_view.py index 417466cd83..448254f26b 100644 --- a/lib/spack/spack/filesystem_view.py +++ b/lib/spack/spack/filesystem_view.py @@ -211,7 +211,7 @@ class YamlFilesystemView(FilesystemView): if self.projections: mkdirp(os.path.dirname(self.projections_path)) with open(self.projections_path, 'w') as f: - f.write(s_yaml.dump({'projections': self.projections})) + f.write(s_yaml.dump_config({'projections': self.projections})) def read_projections(self): if os.path.exists(self.projections_path): diff --git a/lib/spack/spack/hooks/yaml_version_check.py b/lib/spack/spack/hooks/yaml_version_check.py index 76ebceab79..ff8be58481 100644 --- a/lib/spack/spack/hooks/yaml_version_check.py +++ b/lib/spack/spack/hooks/yaml_version_check.py @@ -24,7 +24,7 @@ def check_compiler_yaml_version(): data = None if os.path.isfile(file_name): with open(file_name) as f: - data = syaml.load(f) + data = syaml.load_config(f) if data: compilers = data.get('compilers') diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index d38ede1385..434d09c125 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -235,7 +235,7 @@ def generate_module_index(root, modules): index_path = os.path.join(root, 'module-index.yaml') llnl.util.filesystem.mkdirp(root) with open(index_path, 'w') as index_file: - syaml.dump(index, index_file, default_flow_style=False) + syaml.dump(index, default_flow_style=False, stream=index_file) def _generate_upstream_module_index(): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 1a6cc5f6bd..cb4942b157 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -79,7 +79,6 @@ expansion when it is the first character in an id typed on the command line. 
import base64 import sys import collections -import ctypes import hashlib import itertools import os @@ -88,6 +87,7 @@ import re import six from operator import attrgetter +import ruamel.yaml as yaml from llnl.util.filesystem import find_headers, find_libraries, is_exe from llnl.util.lang import key_ordering, HashableMap, ObjectWrapper, dedupe @@ -185,9 +185,6 @@ _separators = '[\\%s]' % '\\'.join(color_formats.keys()) #: every time we call str() _any_version = VersionList([':']) -#: Max integer helps avoid passing too large a value to cyaml. -maxint = 2 ** (ctypes.sizeof(ctypes.c_int) * 8 - 1) - 1 - default_format = '{name}{@version}' default_format += '{%compiler.name}{@compiler.version}{compiler_flags}' default_format += '{variants}{arch=architecture}' @@ -1366,8 +1363,8 @@ class Spec(object): """ # TODO: curently we strip build dependencies by default. Rethink # this when we move to using package hashing on all specs. - yaml_text = syaml.dump(self.to_node_dict(hash=hash), - default_flow_style=True, width=maxint) + yaml_text = syaml.dump( + self.to_node_dict(hash=hash), default_flow_style=True) sha = hashlib.sha1(yaml_text.encode('utf-8')) b32_hash = base64.b32encode(sha.digest()).lower() @@ -1937,7 +1934,7 @@ class Spec(object): stream -- string or file object to read from. """ try: - data = syaml.load(stream) + data = yaml.load(stream) return Spec.from_dict(data) except MarkedYAMLError as e: raise syaml.SpackYAMLError("error parsing YAML spec:", str(e)) diff --git a/lib/spack/spack/test/cmd/release_jobs.py b/lib/spack/spack/test/cmd/release_jobs.py index c4f44308d9..0dbe8c5d5c 100644 --- a/lib/spack/spack/test/cmd/release_jobs.py +++ b/lib/spack/spack/test/cmd/release_jobs.py @@ -5,6 +5,7 @@ import os import pytest +import re import spack import spack.environment as ev @@ -122,6 +123,6 @@ spack: release_jobs('--output-file', outputfile) with open(outputfile) as f: - contents = f.read() + contents = f.read().replace(os.linesep, '') assert('archive-files' in contents) - assert('stages: [stage-0' in contents) + assert(re.search(r'stages:\s*\[\s*stage-0', contents)) diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py index 5563bad75b..e5fa4a851d 100644 --- a/lib/spack/spack/test/concretize_preferences.py +++ b/lib/spack/spack/test/concretize_preferences.py @@ -29,7 +29,7 @@ def concretize_scope(config, tmpdir): @pytest.fixture() def configure_permissions(): - conf = syaml.load("""\ + conf = syaml.load_config("""\ all: permissions: read: group @@ -182,9 +182,9 @@ class TestConcretizePreferences(object): """Verify that virtuals are not allowed in packages.yaml.""" # set up a packages.yaml file with a vdep as a key. We use - # syaml.load here to make sure source lines in the config are + # syaml.load_config here to make sure source lines in the config are # attached to parsed strings, as the error message uses them. 
- conf = syaml.load("""\ + conf = syaml.load_config("""\ mpi: paths: mpi-with-lapack@2.1: /path/to/lapack @@ -197,7 +197,7 @@ mpi: def test_all_is_not_a_virtual(self): """Verify that `all` is allowed in packages.yaml.""" - conf = syaml.load("""\ + conf = syaml.load_config("""\ all: variants: [+mpi] """) @@ -214,7 +214,7 @@ all: assert not spec['mpi'].external # load config - conf = syaml.load("""\ + conf = syaml.load_config("""\ all: providers: mpi: [mpich] diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index 3b85bb2a23..605c37d468 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -12,7 +12,6 @@ from six import StringIO from llnl.util.filesystem import touch, mkdirp import pytest -import ruamel.yaml as yaml import spack.paths import spack.config @@ -57,7 +56,7 @@ def write_config_file(tmpdir): config_yaml = tmpdir.join(scope, config + '.yaml') config_yaml.ensure() with config_yaml.open('w') as f: - yaml.dump(data, f) + syaml.dump_config(data, f) return _write @@ -721,10 +720,44 @@ env: '/x/y/z', '$spack/var/spack/repos/builtin'] +def test_single_file_scope_section_override(tmpdir, config): + """Check that individual config sections can be overridden in an + environment config. The config here primarily differs in that the + ``packages`` section is intended to override all other scopes (using the + "::" syntax). + """ + env_yaml = str(tmpdir.join("env.yaml")) + with open(env_yaml, 'w') as f: + f.write("""\ +env: + config: + verify_ssl: False + packages:: + libelf: + compiler: [ 'gcc@4.5.3' ] + repos: + - /x/y/z +""") + + scope = spack.config.SingleFileScope( + 'env', env_yaml, spack.schema.env.schema, ['env']) + + with spack.config.override(scope): + # from the single-file config + assert spack.config.get('config:verify_ssl') is False + assert spack.config.get('packages:libelf:compiler') == ['gcc@4.5.3'] + + # from the lower config scopes + assert spack.config.get('config:checksum') is True + assert not spack.config.get('packages:externalmodule') + assert spack.config.get('repos') == [ + '/x/y/z', '$spack/var/spack/repos/builtin'] + + def check_schema(name, file_contents): """Check a Spack YAML schema against some data""" f = StringIO(file_contents) - data = syaml.load(f) + data = syaml.load_config(f) spack.config.validate(data, name) diff --git a/lib/spack/spack/test/spack_yaml.py b/lib/spack/spack/test/spack_yaml.py index 41a7c0e015..2166d294e9 100644 --- a/lib/spack/spack/test/spack_yaml.py +++ b/lib/spack/spack/test/spack_yaml.py @@ -27,7 +27,7 @@ config_file: [ 1, 2, 3 ] some_key: some_string """ - return syaml.load(test_file) + return syaml.load_config(test_file) def test_parse(data): diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py index f08be5fdb5..c4910be8bd 100644 --- a/lib/spack/spack/util/spack_yaml.py +++ b/lib/spack/spack/util/spack_yaml.py @@ -12,13 +12,14 @@ default unorderd dict. 
""" +import ctypes + + from ordereddict_backport import OrderedDict from six import string_types, StringIO import ruamel.yaml as yaml -from ruamel.yaml import Loader, Dumper -from ruamel.yaml.nodes import MappingNode, SequenceNode, ScalarNode -from ruamel.yaml.constructor import ConstructorError +from ruamel.yaml import RoundTripLoader, RoundTripDumper from llnl.util.tty.color import colorize, clen, cextra @@ -58,6 +59,11 @@ syaml_types = { } +markable_types = set(syaml_types) | set([ + yaml.comments.CommentedSeq, + yaml.comments.CommentedMap]) + + def syaml_type(obj): """Get the corresponding syaml wrapper type for a primitive type. @@ -72,19 +78,15 @@ def syaml_type(obj): def markable(obj): """Whether an object can be marked.""" - return type(obj) in syaml_types + return type(obj) in markable_types def mark(obj, node): """Add start and end markers to an object.""" - if not markable(obj): - return - if hasattr(node, 'start_mark'): obj._start_mark = node.start_mark elif hasattr(node, '_start_mark'): obj._start_mark = node._start_mark - if hasattr(node, 'end_mark'): obj._end_mark = node.end_mark elif hasattr(node, '_end_mark'): @@ -97,8 +99,11 @@ def marked(obj): hasattr(obj, '_end_mark') and obj._end_mark) -class OrderedLineLoader(Loader): - """YAML loader that preserves order and line numbers. +class OrderedLineLoader(RoundTripLoader): + """YAML loader specifically intended for reading Spack configuration + files. It preserves order and line numbers. It also has special-purpose + logic for handling dictionary keys that indicate a Spack config + override: namely any key that contains an "extra" ':' character. Mappings read in by this loader behave like an ordered dict. Sequences, mappings, and strings also have new attributes, @@ -107,74 +112,46 @@ class OrderedLineLoader(Loader): """ # - # Override construct_yaml_* so that they build our derived types, - # which allows us to add new attributes to them. + # Override construct_yaml_* so that we can apply _start_mark/_end_mark to + # them. The superclass returns CommentedMap/CommentedSeq objects that we + # can add attributes to (and we depend on their behavior to preserve + # comments). # - # The standard YAML constructors return empty instances and fill - # in with mappings later. We preserve this behavior. + # The inherited sequence/dictionary constructors return empty instances + # and fill in with mappings later. We preserve this behavior. 
# def construct_yaml_str(self, node): - value = self.construct_scalar(node) - value = syaml_str(value) + value = super(OrderedLineLoader, self).construct_yaml_str(node) + # There is no specific marker to indicate that we are parsing a key, + # so this assumes we are talking about a Spack config override key if + # it ends with a ':' and does not contain a '@' (which can appear + # in config values that refer to Spack specs) + if value and value.endswith(':') and '@' not in value: + value = syaml_str(value[:-1]) + value.override = True + else: + value = syaml_str(value) mark(value, node) return value def construct_yaml_seq(self, node): - data = syaml_list() - mark(data, node) + gen = super(OrderedLineLoader, self).construct_yaml_seq(node) + data = next(gen) + if markable(data): + mark(data, node) yield data - data.extend(self.construct_sequence(node)) + for x in gen: + pass def construct_yaml_map(self, node): - data = syaml_dict() - mark(data, node) + gen = super(OrderedLineLoader, self).construct_yaml_map(node) + data = next(gen) + if markable(data): + mark(data, node) yield data - value = self.construct_mapping(node) - data.update(value) - - # - # Override the ``construct_*`` routines. These fill in empty - # objects after yielded by the above ``construct_yaml_*`` methods. - # - def construct_sequence(self, node, deep=False): - if not isinstance(node, SequenceNode): - raise ConstructorError( - None, None, - "expected a sequence node, but found %s" % node.id, - node.start_mark) - value = syaml_list(self.construct_object(child, deep=deep) - for child in node.value) - mark(value, node) - return value - - def construct_mapping(self, node, deep=False): - """Store mappings as OrderedDicts instead of as regular python - dictionaries to preserve file ordering.""" - if not isinstance(node, MappingNode): - raise ConstructorError( - None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) - - mapping = syaml_dict() - for key_node, value_node in node.value: - key = self.construct_object(key_node, deep=deep) - try: - hash(key) - except TypeError as exc: - raise ConstructorError( - "while constructing a mapping", node.start_mark, - "found unacceptable key (%s)" % exc, key_node.start_mark) - value = self.construct_object(value_node, deep=deep) - if key in mapping: - raise ConstructorError( - "while constructing a mapping", node.start_mark, - "found already in-use key (%s)" % key, key_node.start_mark) - mapping[key] = value - - mark(mapping, node) - return mapping + for x in gen: + pass # register above new constructors @@ -186,7 +163,7 @@ OrderedLineLoader.add_constructor( 'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str) -class OrderedLineDumper(Dumper): +class OrderedLineDumper(RoundTripDumper): """Dumper that preserves ordering and formats ``syaml_*`` objects. This dumper preserves insertion ordering ``syaml_dict`` objects @@ -196,41 +173,15 @@ class OrderedLineDumper(Dumper): """ - def represent_mapping(self, tag, mapping, flow_style=None): - value = [] - node = MappingNode(tag, value, flow_style=flow_style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - best_style = True - if hasattr(mapping, 'items'): - # if it's a syaml_dict, preserve OrderedDict order. - # Otherwise do the default thing. 
- sort = not isinstance(mapping, syaml_dict) - mapping = list(mapping.items()) - if sort: - mapping.sort() - - for item_key, item_value in mapping: - node_key = self.represent_data(item_key) - node_value = self.represent_data(item_value) - if not (isinstance(node_key, ScalarNode) and not node_key.style): - best_style = False - if not (isinstance(node_value, ScalarNode) and - not node_value.style): - best_style = False - value.append((node_key, node_value)) - if flow_style is None: - if self.default_flow_style is not None: - node.flow_style = self.default_flow_style - else: - node.flow_style = best_style - - return node - def ignore_aliases(self, _data): """Make the dumper NEVER print YAML aliases.""" return True + def represent_str(self, data): + if hasattr(data, 'override') and data.override: + data = data + ':' + return super(OrderedLineDumper, self).represent_str(data) + # Make our special objects look like normal YAML ones. OrderedLineDumper.add_representer(syaml_dict, OrderedLineDumper.represent_dict) @@ -239,6 +190,28 @@ OrderedLineDumper.add_representer(syaml_str, OrderedLineDumper.represent_str) OrderedLineDumper.add_representer(syaml_int, OrderedLineDumper.represent_int) +class SafeDumper(RoundTripDumper): + + def ignore_aliases(self, _data): + """Make the dumper NEVER print YAML aliases.""" + return True + + +# Allow syaml_dict objects to be represented by ruamel.yaml.dump. With this, +# syaml_dict allows a user to provide an ordered dictionary to yaml.dump when +# the RoundTripDumper is used. +RoundTripDumper.add_representer(syaml_dict, RoundTripDumper.represent_dict) + + +#: Max integer helps avoid passing too large a value to cyaml. +maxint = 2 ** (ctypes.sizeof(ctypes.c_int) * 8 - 1) - 1 + + +def dump(obj, default_flow_style=False, stream=None): + return yaml.dump(obj, default_flow_style=default_flow_style, width=maxint, + Dumper=SafeDumper, stream=stream) + + def file_line(mark): """Format a mark as : information.""" result = mark.name @@ -288,7 +261,8 @@ class LineAnnotationDumper(OrderedLineDumper): result = super(LineAnnotationDumper, self).represent_data(data) if isinstance(result.value, string_types): result.value = syaml_str(data) - mark(result.value, data) + if markable(result.value): + mark(result.value, data) return result def write_stream_start(self): @@ -319,14 +293,18 @@ class LineAnnotationDumper(OrderedLineDumper): _annotations.append('') -def load(*args, **kwargs): +def load_config(*args, **kwargs): """Load but modify the loader instance so that it will add __line__ - atrributes to the returned object.""" + attributes to the returned object.""" kwargs['Loader'] = OrderedLineLoader return yaml.load(*args, **kwargs) -def dump(*args, **kwargs): +def load(*args, **kwargs): + return yaml.load(*args, **kwargs) + + +def dump_config(*args, **kwargs): blame = kwargs.pop('blame', False) if blame: -- cgit v1.2.3-70-g09d2
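
For reference, the override-key heuristic discussed in the commit message can be sketched in isolation. The snippet below is a simplified, standalone illustration only — the OverrideStr/parse_string/dump_string names are hypothetical and are not part of this patch; the real logic lives in OrderedLineLoader.construct_yaml_str and OrderedLineDumper.represent_str in lib/spack/spack/util/spack_yaml.py.

# Standalone sketch (hypothetical helper names, not part of the patch) of the
# override heuristic: a string that ends with ':' and does not contain '@' is
# treated as a config override key; strings containing '@' are assumed to be
# version/spec strings (e.g. open version ranges) and are left untouched.

class OverrideStr(str):
    """str subclass so an 'override' attribute can be attached to it."""
    pass


def parse_string(value):
    """Mimic construct_yaml_str: strip the trailing ':' and set .override."""
    if value and value.endswith(':') and '@' not in value:
        marked = OverrideStr(value[:-1])
        marked.override = True
        return marked
    return OverrideStr(value)


def dump_string(value):
    """Mimic represent_str: re-append ':' so the override marker round-trips."""
    if getattr(value, 'override', False):
        return str(value) + ':'
    return str(value)


if __name__ == '__main__':
    # 'packages::' in a config file reaches the loader as the key 'packages:'
    for raw in ['packages:', 'gcc@4.5:', 'config']:
        parsed = parse_string(raw)
        print('%-10s override=%-5s round-trip=%s'
              % (raw, getattr(parsed, 'override', False), dump_string(parsed)))

The '@' guard is what keeps open version ranges such as gcc@4.5: (which legitimately end in ':') from being misread as overrides, matching the reasoning given in the commit message above.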