Snap for 8426163 from ca387011d3123308b27f0cf2abb72c3819baba4d to mainline-tzdata2-release

Change-Id: I8beb931294e2edc38ef70305f0519bc91c8365f3
diff --git a/Android.bp b/Android.bp
index e0fb5d9..76d69e5 100644
--- a/Android.bp
+++ b/Android.bp
@@ -12,36 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package {
-    default_applicable_licenses: ["system_libufdt_license"],
-}
-
-// Added automatically by a large-scale-change that took the approach of
-// 'apply every license found to every target'. While this makes sure we respect
-// every license restriction, it may not be entirely correct.
-//
-// e.g. GPL in an MIT project might only apply to the contrib/ directory.
-//
-// Please consider splitting the single license below into multiple licenses,
-// taking care not to lose any license_kind information, and overriding the
-// default license using the 'licenses: [...]' property on targets as needed.
-//
-// For unused files, consider creating a 'fileGroup' with "//visibility:private"
-// to attach the license to, and including a comment whether the files may be
-// used in the current project.
-// See: http://go/android-license-faq
-license {
-    name: "system_libufdt_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-        "SPDX-license-identifier-BSD",
-    ],
-    license_text: [
-        "NOTICE",
-    ],
-}
-
 subdirs = [
     "sysdeps",
     "tests/libufdt_verify",
diff --git a/METADATA b/METADATA
deleted file mode 100644
index d97975c..0000000
--- a/METADATA
+++ /dev/null
@@ -1,3 +0,0 @@
-third_party {
-  license_type: NOTICE
-}
diff --git a/OWNERS b/OWNERS
index 906cba6..f512bb3 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,3 +1,2 @@
 bowgotsai@google.com
-hridya@google.com
 szuweilin@google.com
diff --git a/sysdeps/Android.bp b/sysdeps/Android.bp
index 7babbda..772e01a 100644
--- a/sysdeps/Android.bp
+++ b/sysdeps/Android.bp
@@ -12,16 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "system_libufdt_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    //   SPDX-license-identifier-BSD
-    default_applicable_licenses: ["system_libufdt_license"],
-}
-
 cc_library_static {
     name: "libufdt_sysdeps",
     host_supported: true,
diff --git a/tests/libufdt_verify/Android.bp b/tests/libufdt_verify/Android.bp
index 4258c65..bb07f79 100644
--- a/tests/libufdt_verify/Android.bp
+++ b/tests/libufdt_verify/Android.bp
@@ -12,15 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "system_libufdt_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["system_libufdt_license"],
-}
-
 cc_library_static {
     name: "libufdt_verify",
     host_supported: true,
diff --git a/tests/src/Android.bp b/tests/src/Android.bp
index 10ba099..c9a66d2 100644
--- a/tests/src/Android.bp
+++ b/tests/src/Android.bp
@@ -12,15 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "system_libufdt_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["system_libufdt_license"],
-}
-
 libufdt_tests_cflags = [
     "-Wall",
     "-Werror",
diff --git a/ufdt_overlay.c b/ufdt_overlay.c
index c432cd4..019545f 100644
--- a/ufdt_overlay.c
+++ b/ufdt_overlay.c
@@ -363,18 +363,18 @@
   struct ufdt_node *target_node = NULL;
   struct ufdt_node *overlay_node = NULL;
 
-  overlay_node = ufdt_node_get_node_by_path(frag_node, "__overlay__");
-  if (overlay_node == NULL) {
-    return OVERLAY_RESULT_MISSING_OVERLAY;
-  }
-
   enum overlay_result result =
       ufdt_overlay_get_target(tree, frag_node, &target_node);
   if (target_node == NULL) {
-    dto_error("Unable to resolve target for %s\n", ufdt_node_name(frag_node));
     return result;
   }
 
+  overlay_node = ufdt_node_get_node_by_path(frag_node, "__overlay__");
+  if (overlay_node == NULL) {
+    dto_error("missing __overlay__ sub-node\n");
+    return OVERLAY_RESULT_MISSING_OVERLAY;
+  }
+
   int err = ufdt_overlay_node(target_node, overlay_node, pool);
 
   if (err < 0) {
@@ -392,17 +392,15 @@
 static int ufdt_overlay_apply_fragments(struct ufdt *main_tree,
                                         struct ufdt *overlay_tree,
                                         struct ufdt_node_pool *pool) {
-  enum overlay_result ret;
+  enum overlay_result err;
   struct ufdt_node **it;
   /*
    * This loop may iterate to subnodes that's not a fragment node.
-   * We must fail for any other error.
+   * In such a case, ufdt_apply_fragment would fail with a return value of -1.
    */
   for_each_node(it, overlay_tree->root) {
-    ret = ufdt_apply_fragment(main_tree, *it, pool);
-    if ((ret != OVERLAY_RESULT_OK) && (ret != OVERLAY_RESULT_MISSING_OVERLAY)) {
-      dto_error("failed to apply overlay fragment %s ret: %d\n",
-                ufdt_node_name(*it), ret);
+    err = ufdt_apply_fragment(main_tree, *it, pool);
+    if (err == OVERLAY_RESULT_MERGE_FAIL) {
       return -1;
     }
   }
diff --git a/utils/src/Android.bp b/utils/src/Android.bp
index 03f4aa6..e02a922 100644
--- a/utils/src/Android.bp
+++ b/utils/src/Android.bp
@@ -13,15 +13,6 @@
 // limitations under the License.
 
 //##################################################
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "system_libufdt_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["system_libufdt_license"],
-}
-
 cc_binary_host {
     name: "mkdtimg",
     cflags: [
diff --git a/utils/src/mkdtboimg.py b/utils/src/mkdtboimg.py
index bc33e1f..03f0fd1 100755
--- a/utils/src/mkdtboimg.py
+++ b/utils/src/mkdtboimg.py
@@ -18,13 +18,12 @@
 """Tool for packing multiple DTB/DTBO files into a single image"""
 
 import argparse
-import fnmatch
 import os
-import struct
-import zlib
 from array import array
 from collections import namedtuple
+import struct
 from sys import stdout
+import zlib
 
 class CompressionFormat(object):
     """Enum representing DT compression format for a DT entry.
@@ -37,18 +36,14 @@
     """Provides individual DT image file arguments to be added to a DTBO.
 
     Attributes:
-        REQUIRED_KEYS_V0: 'keys' needed to be present in the dictionary passed to instantiate
-            an object of this class when a DTBO header of version 0 is used.
-        REQUIRED_KEYS_V1: 'keys' needed to be present in the dictionary passed to instantiate
-            an object of this class when a DTBO header of version 1 is used.
-        COMPRESSION_FORMAT_MASK: Mask to retrieve compression info for DT entry from flags field
+        _REQUIRED_KEYS: 'keys' needed to be present in the dictionary passed to instantiate
+            an object of this class.
+        _COMPRESSION_FORMAT_MASK: Mask to retrieve compression info for DT entry from flags field
             when a DTBO header of version 1 is used.
     """
-    COMPRESSION_FORMAT_MASK = 0x0f
-    REQUIRED_KEYS_V0 = ('dt_file', 'dt_size', 'dt_offset', 'id', 'rev',
-                     'custom0', 'custom1', 'custom2', 'custom3')
-    REQUIRED_KEYS_V1 = ('dt_file', 'dt_size', 'dt_offset', 'id', 'rev',
-                     'flags', 'custom0', 'custom1', 'custom2')
+    _COMPRESSION_FORMAT_MASK = 0x0f
+    REQUIRED_KEYS = ('dt_file', 'dt_size', 'dt_offset', 'id', 'rev', 'flags',
+                     'custom0', 'custom1', 'custom2')
 
     @staticmethod
     def __get_number_or_prop(arg):
@@ -87,14 +82,7 @@
                 the tuple (_REQUIRED_KEYS)
         """
 
-        self.__version = kwargs['version']
-        required_keys = None
-        if self.__version == 0:
-            required_keys = self.REQUIRED_KEYS_V0
-        elif self.__version == 1:
-            required_keys = self.REQUIRED_KEYS_V1
-
-        missing_keys = set(required_keys) - set(kwargs)
+        missing_keys = set(self.REQUIRED_KEYS) - set(kwargs)
         if missing_keys:
             raise ValueError('Missing keys in DtEntry constructor: %r' %
                              sorted(missing_keys))
@@ -104,13 +92,10 @@
         self.__dt_size = kwargs['dt_size']
         self.__id = self.__get_number_or_prop(kwargs['id'])
         self.__rev = self.__get_number_or_prop(kwargs['rev'])
-        if self.__version == 1:
-            self.__flags = self.__get_number_or_prop(kwargs['flags'])
+        self.__flags = self.__get_number_or_prop(kwargs['flags'])
         self.__custom0 = self.__get_number_or_prop(kwargs['custom0'])
         self.__custom1 = self.__get_number_or_prop(kwargs['custom1'])
         self.__custom2 = self.__get_number_or_prop(kwargs['custom2'])
-        if self.__version == 0:
-            self.__custom3 = self.__get_number_or_prop(kwargs['custom3'])
 
     def __str__(self):
         sb = []
@@ -122,30 +107,26 @@
                                                    value=self.__id))
         sb.append('{key:>20} = {value:08x}'.format(key='rev',
                                                    value=self.__rev))
-        if self.__version == 1:
-            sb.append('{key:>20} = {value:08x}'.format(key='flags',
-                                                       value=self.__flags))
         sb.append('{key:>20} = {value:08x}'.format(key='custom[0]',
-                                                   value=self.__custom0))
+                                                   value=self.__flags))
         sb.append('{key:>20} = {value:08x}'.format(key='custom[1]',
-                                                   value=self.__custom1))
+                                                   value=self.__custom0))
         sb.append('{key:>20} = {value:08x}'.format(key='custom[2]',
+                                                   value=self.__custom1))
+        sb.append('{key:>20} = {value:08x}'.format(key='custom[3]',
                                                    value=self.__custom2))
-        if self.__version == 0:
-            sb.append('{key:>20} = {value:08x}'.format(key='custom[3]',
-                                                       value=self.__custom3))
         return '\n'.join(sb)
 
-    def compression_info(self):
+    def compression_info(self, version):
         """CompressionFormat: compression format for DT image file.
 
            Args:
                 version: Version of DTBO header, compression is only
                          supported from version 1.
         """
-        if self.__version == 0:
+        if version == 0:
             return CompressionFormat.NO_COMPRESSION
-        return self.flags & self.COMPRESSION_FORMAT_MASK
+        return self.flags & self._COMPRESSION_FORMAT_MASK
 
     @property
     def dt_file(self):
@@ -200,10 +181,6 @@
         """int: DT entry custom2 for this DT image."""
         return self.__custom2
 
-    @property
-    def custom3(self):
-        """int: DT entry custom3 for this DT image."""
-        return self.__custom3
 
 class Dtbo(object):
     """
@@ -255,17 +232,10 @@
             dtbo_offset: Offset where the DT image file for this dt_entry can
                 be found in the resulting DTBO image.
         """
-        if self.version == 0:
-            struct.pack_into('>8I', self.__metadata, metadata_offset, dt_entry.size,
-                             dt_entry.dt_offset, dt_entry.image_id, dt_entry.rev,
-                             dt_entry.custom0, dt_entry.custom1, dt_entry.custom2,
-                             dt_entry.custom3)
-        elif self.version == 1:
-            struct.pack_into('>8I', self.__metadata, metadata_offset, dt_entry.size,
-                             dt_entry.dt_offset, dt_entry.image_id, dt_entry.rev,
-                             dt_entry.flags, dt_entry.custom0, dt_entry.custom1,
-                             dt_entry.custom2)
-
+        struct.pack_into('>8I', self.__metadata, metadata_offset, dt_entry.size,
+                         dt_entry.dt_offset, dt_entry.image_id, dt_entry.rev,
+                         dt_entry.flags, dt_entry.custom0, dt_entry.custom1,
+                         dt_entry.custom2)
 
     def _update_metadata(self):
         """Updates the DTBO metadata.
@@ -274,7 +244,7 @@
         Tree table entries and update the DTBO header.
         """
 
-        self.__metadata = array('b', b' ' * self.__metadata_size)
+        self.__metadata = array('c', ' ' * self.__metadata_size)
         metadata_offset = self.header_size
         for dt_entry in self.__dt_entries:
             self._update_dt_entry_header(dt_entry, metadata_offset)
@@ -320,21 +290,15 @@
         if self.__dt_entries:
             raise ValueError('DTBO DT entries can be added only once')
 
-        offset = self.dt_entries_offset // 4
+        offset = self.dt_entries_offset / 4
         params = {}
-        params['version'] = self.version
         params['dt_file'] = None
         for i in range(0, self.dt_entry_count):
             dt_table_entry = self.__metadata[offset:offset + self._DT_ENTRY_HEADER_INTS]
             params['dt_size'] = dt_table_entry[0]
             params['dt_offset'] = dt_table_entry[1]
             for j in range(2, self._DT_ENTRY_HEADER_INTS):
-                required_keys = None
-                if self.version == 0:
-                    required_keys = DtEntry.REQUIRED_KEYS_V0
-                elif self.version == 1:
-                    required_keys = DtEntry.REQUIRED_KEYS_V1
-                params[required_keys[j + 1]] = str(dt_table_entry[j])
+                params[DtEntry.REQUIRED_KEYS[j + 1]] = str(dt_table_entry[j])
             dt_entry = DtEntry(**params)
             self.__dt_entries.append(dt_entry)
             offset += self._DT_ENTRY_HEADER_INTS
@@ -501,13 +465,14 @@
         dt_offset = (self.header_size +
                      dt_entry_count * self.dt_entry_size)
 
-        dt_entry_buf = b""
+        dt_entry_buf = ""
         for dt_entry in dt_entries:
             if not isinstance(dt_entry, DtEntry):
                 raise ValueError('Adding invalid DT entry object to DTBO')
             entry = self._find_dt_entry_with_same_file(dt_entry)
-            dt_entry_compression_info = dt_entry.compression_info()
-            if entry and (entry.compression_info() == dt_entry_compression_info):
+            dt_entry_compression_info = dt_entry.compression_info(self.version)
+            if entry and (entry.compression_info(self.version)
+                          == dt_entry_compression_info):
                 dt_entry.dt_offset = entry.dt_offset
                 dt_entry.size = entry.size
             else:
@@ -545,7 +510,7 @@
         offset = self.dt_entries[idx].dt_offset
         self.__file.seek(offset, 0)
         fout.seek(0)
-        compression_format = self.dt_entries[idx].compression_info()
+        compression_format = self.dt_entries[idx].compression_info(self.version)
         if decompress and compression_format:
             if (compression_format == CompressionFormat.ZLIB_COMPRESSION or
                 compression_format == CompressionFormat.GZIP_COMPRESSION):
@@ -615,9 +580,6 @@
     parser.add_argument('--custom2', type=str, dest='custom2',
                         action='store',
                         default=global_args.global_custom2)
-    parser.add_argument('--custom3', type=str, dest='custom3',
-                        action='store',
-                        default=global_args.global_custom3)
     return parser.parse_args(arglist)
 
 
@@ -650,7 +612,7 @@
         raise ValueError('Input DT images must be provided')
 
     total_images = len(img_file_idx)
-    for idx in range(total_images):
+    for idx in xrange(total_images):
         start_idx = img_file_idx[idx]
         if idx == total_images - 1:
             argv = arg_list[start_idx:]
@@ -659,7 +621,6 @@
             argv = arg_list[start_idx:end_idx]
         args = parse_dt_entry(global_args, argv)
         params = vars(args)
-        params['version'] = global_args.version
         params['dt_offset'] = 0
         params['dt_size'] = os.fstat(params['dt_file'].fileno()).st_size
         dt_entries.append(DtEntry(**params))
@@ -792,8 +753,6 @@
                         action='store', default='0')
     parser.add_argument('--custom2', type=str, dest='global_custom2',
                         action='store', default='0')
-    parser.add_argument('--custom3', type=str, dest='global_custom3',
-                        action='store', default='0')
     args = parser.parse_args(argv)
     return args, remainder
 
@@ -810,7 +769,7 @@
 
     parser = argparse.ArgumentParser(prog='dump')
     parser.add_argument('--output', '-o', nargs='?',
-                        type=argparse.FileType('w'),
+                        type=argparse.FileType('wb'),
                         dest='outfile',
                         default=stdout)
     parser.add_argument('--dtb', '-b', nargs='?', type=str,
@@ -830,7 +789,7 @@
     """
     parser = argparse.ArgumentParser(prog='cfg_create')
     parser.add_argument('conf_file', nargs='?',
-                        type=argparse.FileType('r'),
+                        type=argparse.FileType('rb'),
                         default=None)
     cwd = os.getcwd()
     parser.add_argument('--dtb-dir', '-d', nargs='?', type=str,
@@ -886,22 +845,15 @@
     if not args.conf_file:
         raise ValueError('Configuration file must be provided')
 
-    _DT_KEYS = ('id', 'rev', 'flags', 'custom0', 'custom1', 'custom2', 'custom3')
+    _DT_KEYS = ('id', 'rev', 'flags', 'custom0', 'custom1', 'custom2')
     _GLOBAL_KEY_TYPES = {'dt_type': str, 'page_size': int, 'version': int}
 
     global_args, dt_args = parse_config_file(args.conf_file,
                                              _DT_KEYS, _GLOBAL_KEY_TYPES)
-    version = global_args['version']
-
     params = {}
-    params['version'] = version
     dt_entries = []
     for dt_arg in dt_args:
-        filepath = dt_arg['filename']
-        if not os.path.isabs(filepath):
-            for root, dirnames, filenames in os.walk(args.dtbdir):
-                for filename in fnmatch.filter(filenames, os.path.basename(filepath)):
-                    filepath = os.path.join(root, filename)
+        filepath = args.dtbdir + os.sep + dt_arg['filename']
         params['dt_file'] = open(filepath, 'rb')
         params['dt_offset'] = 0
         params['dt_size'] = os.fstat(params['dt_file'].fileno()).st_size
@@ -913,7 +865,7 @@
         dt_entries.append(DtEntry(**params))
 
     # Create and write DTBO file
-    dtbo = Dtbo(fout, global_args['dt_type'], global_args['page_size'], version)
+    dtbo = Dtbo(fout, global_args['dt_type'], global_args['page_size'], global_args['version'])
     dt_entry_buf = dtbo.add_dt_entries(dt_entries)
     dtbo.commit(dt_entry_buf)
     fout.close()
@@ -964,7 +916,6 @@
     sb.append('      --custom0=<number>')
     sb.append('      --custom1=<number>')
     sb.append('      --custom2=<number>\n')
-    sb.append('      --custom3=<number>\n')
 
     sb.append('      The value could be a number or a DT node path.')
     sb.append('      <number> could be a 32-bits digit or hex value, ex. 68000, 0x6800.')
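
Aside for readers of the mkdtboimg.py hunks above (illustrative only, not part of this patch): each DT table entry is packed as eight big-endian 32-bit words -- dt_size, dt_offset, id, rev, flags, custom0..custom2 -- and the compression format lives in the low nibble of flags (mask 0x0f), which is only honored for header version 1 and later. The standalone sketch below mirrors that layout; the helper names and example values are made up for illustration.

# Illustrative only -- not part of this change. A minimal sketch of the
# 8-word, big-endian DT table entry that the mkdtboimg.py code above packs
# with struct.pack_into('>8I', ...) and later re-parses; the low nibble of
# 'flags' is the compression format, as in DtEntry.compression_info().
import struct

DT_ENTRY_FORMAT = '>8I'          # dt_size, dt_offset, id, rev, flags, custom0..2
COMPRESSION_FORMAT_MASK = 0x0f   # same mask the DtEntry class uses

def pack_dt_entry(dt_size, dt_offset, image_id, rev, flags,
                  custom0=0, custom1=0, custom2=0):
    """Pack one DT table entry in the same field order as the tool."""
    return struct.pack(DT_ENTRY_FORMAT, dt_size, dt_offset, image_id, rev,
                       flags, custom0, custom1, custom2)

def compression_format(flags, version):
    """Mirror compression_info(): header version 0 never compresses."""
    if version == 0:
        return 0
    return flags & COMPRESSION_FORMAT_MASK

if __name__ == '__main__':
    # Example values are made up; only the layout matters.
    raw = pack_dt_entry(dt_size=0x1000, dt_offset=0x800,
                        image_id=0x1, rev=0x0, flags=0x01)
    fields = struct.unpack(DT_ENTRY_FORMAT, raw)
    print('entry is %d bytes, flags=0x%08x, compression=%d'
          % (len(raw), fields[4], compression_format(fields[4], version=1)))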