using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using UnityEngine.AddressableAssets.Utility;
using UnityEngine.ResourceManagement;
using UnityEngine.ResourceManagement.ResourceLocations;
using UnityEngine.ResourceManagement.ResourceProviders;
using UnityEngine.ResourceManagement.Util;
using UnityEngine.Serialization;
namespace UnityEngine.AddressableAssets.ResourceLocators
{
/// <summary>
/// Contains serializable data for an IResourceLocation.
/// </summary>
public class ContentCatalogDataEntry
{
/// <summary>
/// Internal id.
/// </summary>
public string InternalId { get; set; }
/// <summary>
/// IResourceProvider identifier.
/// </summary>
public string Provider { get; private set; }
/// <summary>
/// Keys for this location.
/// </summary>
public List Keys { get; private set; }
/// <summary>
/// Dependency keys.
/// </summary>
public List Dependencies { get; private set; }
/// <summary>
/// Serializable data for the provider.
/// </summary>
public object Data { get; set; }
/// <summary>
/// The type of the resource for the location.
/// </summary>
public Type ResourceType { get; private set; }
/// <summary>
/// Creates a new ContentCatalogEntry object.
/// </summary>
/// <param name="type">The entry type.</param>
/// <param name="internalId">The internal id.</param>
/// <param name="provider">The provider id.</param>
/// <param name="keys">The collection of keys that can be used to retrieve this entry.</param>
/// <param name="dependencies">Optional collection of keys for dependencies.</param>
/// <param name="extraData">Optional additional data to be passed to the provider. For example, AssetBundleProviders use this for cache and crc data.</param>
public ContentCatalogDataEntry(Type type, string internalId, string provider, IEnumerable keys, IEnumerable dependencies = null, object extraData = null)
{
InternalId = internalId;
Provider = provider;
ResourceType = type;
Keys = new List(keys);
Dependencies = dependencies == null ? new List() : new List(dependencies);
Data = extraData;
}
}
/// <summary>
/// Container for ContentCatalogDataEntry objects; serialized to and from catalog files.
/// </summary>
[Serializable]
public class ContentCatalogData
{
//used to verify that this is a valid catalog data file
static int kMagic = nameof(ContentCatalogData).GetHashCode();
//used to check the version of the data in case the format needs to change in the future
const int kVersion = 1;
// Runtime-only hash of the local catalog copy; never serialized.
[NonSerialized]
internal string localHash;
// Runtime-only location this catalog was loaded from; never serialized.
[NonSerialized]
internal IResourceLocation location;
[SerializeField]
internal string m_LocatorId;
[SerializeField]
internal string m_BuildResultHash;
/// <summary>
/// Stores the id of the data provider.
/// </summary>
public string ProviderId
{
get { return m_LocatorId; }
internal set { m_LocatorId = value; }
}
[SerializeField]
ObjectInitializationData m_InstanceProviderData;
/// <summary>
/// Data for the Addressables.ResourceManager.InstanceProvider initialization.
/// </summary>
public ObjectInitializationData InstanceProviderData
{
get { return m_InstanceProviderData; }
set { m_InstanceProviderData = value; }
}
[SerializeField]
ObjectInitializationData m_SceneProviderData;
/// <summary>
/// Data for the Addressables.ResourceManager.SceneProvider initialization.
/// </summary>
public ObjectInitializationData SceneProviderData
{
get { return m_SceneProviderData; }
set { m_SceneProviderData = value; }
}
[SerializeField]
internal List m_ResourceProviderData = new List();
/// <summary>
/// The list of resource provider data. Each entry will add an IResourceProvider to the Addressables.ResourceManager.ResourceProviders list.
/// </summary>
public List ResourceProviderData
{
get { return m_ResourceProviderData; }
set { m_ResourceProviderData = value; }
}
// Catalog entries pending serialization; populated via SetData, not serialized as-is.
IList m_Entries;
/// <summary>
/// Creates a new ContentCatalogData object with the specified locator id.
/// </summary>
/// <param name="id">The id of the locator.</param>
public ContentCatalogData(string id)
{
m_LocatorId = id;
}
/// <summary>
/// Create a new ContentCatalogData object without any data.
/// </summary>
public ContentCatalogData()
{
}
#if UNITY_EDITOR
/// <summary>
/// Creates a new ContentCatalogData object with the given entries and locator id.
/// </summary>
/// <param name="entries">The catalog entries to store.</param>
/// <param name="id">The id of the locator.</param>
public ContentCatalogData(IList entries, string id = null)
{
m_LocatorId = id;
SetData(entries);
}
#endif
#if ENABLE_BINARY_CATALOG
// Drops the locator id and the binary reader so the catalog releases its backing buffer.
internal void CleanData()
{
m_LocatorId = null;
m_Reader = null;
}
#if UNITY_EDITOR
/// <summary>
/// Serializes this catalog (header, provider data and all entries) into a binary buffer.
/// </summary>
/// <returns>The serialized catalog bytes.</returns>
public byte[] SerializeToByteArray()
{
var wr = new BinaryStorageBuffer.Writer(0, new Serializer());
wr.WriteObject(this, false);
return wr.SerializeToByteArray();
}
/// <summary>
/// Stores the entries and rebuilds the binary reader from freshly serialized data,
/// so subsequent reads go through the same code path as a loaded catalog.
/// </summary>
public void SetData(IList entries)
{
m_Entries = entries;
m_Reader = new BinaryStorageBuffer.Reader(SerializeToByteArray(), 1024, new Serializer());
}
// Serializes the catalog and writes the raw bytes to the given path.
internal void SaveToFile(string path)
{
var bytes = SerializeToByteArray();
File.WriteAllBytes(path, bytes);
}
#endif
// Reader over the serialized catalog buffer; the binary format's source of truth at runtime.
BinaryStorageBuffer.Reader m_Reader;
// Wraps an already-serialized buffer without re-serializing.
internal ContentCatalogData(BinaryStorageBuffer.Reader reader)
{
m_Reader = reader;
}
// Creates a locator over this catalog's binary data. overrideId replaces the locator id;
// providerSuffix, when non-null, is appended to provider ids at Locate time.
internal IResourceLocator CreateCustomLocator(string overrideId = "", string providerSuffix = null, int locatorCacheSize = 100)
{
m_LocatorId = overrideId;
return new ResourceLocator(m_LocatorId, m_Reader, locatorCacheSize, providerSuffix);
}
internal class Serializer : BinaryStorageBuffer.ISerializationAdapter
{
// Adapters required to (de)serialize the nested objects this serializer writes.
public IEnumerable Dependencies => new BinaryStorageBuffer.ISerializationAdapter[]
{
new ObjectInitializationData.Serializer(),
new AssetBundleRequestOptionsSerializationAdapter(),
new ResourceLocator.ResourceLocation.Serializer()
};
/// <summary>
/// Reads a ContentCatalogData from the buffer at the given offset.
/// Validates the header magic value before reading the provider data.
/// </summary>
public object Deserialize(BinaryStorageBuffer.Reader reader, Type t, uint offset)
{
    var cd = new ContentCatalogData(reader);
    var h = reader.ReadValue(offset);
    // Reject buffers that were not produced by Serialize (wrong magic value).
    // (Previously the header was re-read into an unused local here; removed.)
    if (h.magic != kMagic)
        throw new Exception("Invalid header data!!!");
    cd.InstanceProviderData = reader.ReadObject(h.instanceProvider);
    cd.SceneProviderData = reader.ReadObject(h.sceneProvider);
    cd.ResourceProviderData = reader.ReadObjectArray(h.initObjectsArray).ToList();
    return cd;
}
/// <summary>
/// Writes a ContentCatalogData to the buffer. The header and key table are reserved
/// first so they land at the front; locations and key/location-set arrays follow.
/// Returns the header offset, which is the catalog's root.
/// </summary>
public uint Serialize(BinaryStorageBuffer.Writer writer, object val)
{
var cd = val as ContentCatalogData;
var entries = cd.m_Entries;
//map each key to the indices of the entries it resolves to
var keyToEntryIndices = new Dictionary>();
for (int i = 0; i < entries.Count; i++)
{
var e = entries[i];
foreach (var k in e.Keys)
{
if (!keyToEntryIndices.TryGetValue(k, out var indices))
keyToEntryIndices.Add(k, indices = new List());
indices.Add(i);
}
}
//reserve header and keys to ensure they are first
var headerOffset = writer.Reserve();
var keysOffset = writer.Reserve((uint)keyToEntryIndices.Count);
var header = new ResourceLocator.Header
{
magic = kMagic,
version = kVersion,
keysOffset = keysOffset,
idOffset = writer.WriteString(cd.ProviderId),
instanceProvider = writer.WriteObject(cd.InstanceProviderData, false),
sceneProvider = writer.WriteObject(cd.SceneProviderData, false),
initObjectsArray = writer.WriteObjects(cd.m_ResourceProviderData, false)
};
writer.Write(headerOffset, in header);
//create array of all locations
var locationIds = new uint[entries.Count];
for (int i = 0; i < entries.Count; i++)
locationIds[i] = writer.WriteObject(new ResourceLocator.ContentCatalogDataEntrySerializationContext { entry = entries[i], allEntries = entries, keyToEntryIndices = keyToEntryIndices }, false);
//create array of all keys
int keyIndex = 0;
var allKeys = new ResourceLocator.KeyData[keyToEntryIndices.Count];
foreach (var k in keyToEntryIndices)
{
//create array of location ids for this key
var locationOffsets = k.Value.Select(i => locationIds[i]).ToArray();
allKeys[keyIndex++] = new ResourceLocator.KeyData
{
keyNameOffset = writer.WriteObject(k.Key, true),
locationSetOffset = writer.Write(locationOffsets)
};
}
//fill in the reserved key table now that every offset is known
writer.Write(keysOffset, allKeys);
return headerOffset;
}
}
/// <summary>
/// Loads a binary catalog from disk and wraps it in a ContentCatalogData.
/// </summary>
internal static ContentCatalogData LoadFromFile(string path, int cacheSize = 1024)
{
    var bytes = File.ReadAllBytes(path);
    var reader = new BinaryStorageBuffer.Reader(bytes, cacheSize, new Serializer());
    return new ContentCatalogData(reader);
}
#else
/// <summary>
/// The IDs for the Resource Providers.
/// </summary>
public string[] ProviderIds
{
get { return m_ProviderIds; }
}
/// <summary>
/// Internal Content Catalog Entry IDs for Addressable Assets.
/// </summary>
public string[] InternalIds
{
get { return m_InternalIds; }
}
[FormerlySerializedAs("m_providerIds")]
[SerializeField]
internal string[] m_ProviderIds = null;
[FormerlySerializedAs("m_internalIds")]
[SerializeField]
internal string[] m_InternalIds = null;
// Base64 blob of serialized key objects; the first 4 bytes hold the key count.
[FormerlySerializedAs("m_keyDataString")]
[SerializeField]
internal string m_KeyDataString = null;
// Base64 blob mapping each key to its key-data offset and the indices of its entries.
[FormerlySerializedAs("m_bucketDataString")]
[SerializeField]
internal string m_BucketDataString = null;
// Base64 blob of fixed-size entry records (k_EntryDataItemPerEntry int32s each).
[FormerlySerializedAs("m_entryDataString")]
[SerializeField]
internal string m_EntryDataString = null;
const int kBytesPerInt32 = 4;
// Each entry record is 7 int32 fields: internalId, provider, dependencyKey,
// depHash, extraData offset, primaryKey, resourceType (see CreateLocator).
const int k_EntryDataItemPerEntry = 7;
// Base64 blob of per-entry provider data (e.g. AssetBundle cache/crc options).
[FormerlySerializedAs("m_extraDataString")]
[SerializeField]
internal string m_ExtraDataString = null;
[SerializeField]
internal SerializedType[] m_resourceTypes = null;
// Shared internal-id prefixes; ids are stored as "index#suffix" (see ExpandInternalId).
[SerializeField]
string[] m_InternalIdPrefixes = null;
// A key's bucket: the offset of the key's serialized data plus its entry indices.
struct Bucket
{
public int dataOffset;
public int[] entries;
}
// Minimal IResourceLocation produced when expanding the compact (JSON) catalog format.
// Dependencies are resolved lazily through the owning ResourceLocationMap by key.
class CompactLocation : IResourceLocation
{
ResourceLocationMap m_Locator;
string m_InternalId;
string m_ProviderId;
object m_Dependency;
object m_Data;
int m_HashCode;
int m_DependencyHashCode;
string m_PrimaryKey;
Type m_Type;
public string InternalId
{
get { return m_InternalId; }
}
public string ProviderId
{
get { return m_ProviderId; }
}
// Resolved on demand through the locator; null when there is no dependency key.
public IList Dependencies
{
get
{
if (m_Dependency == null)
return null;
IList results;
m_Locator.Locate(m_Dependency, typeof(object), out results);
return results;
}
}
public bool HasDependencies
{
get { return m_Dependency != null; }
}
public int DependencyHashCode
{
get { return m_DependencyHashCode; }
}
public object Data
{
get { return m_Data; }
}
public string PrimaryKey
{
get { return m_PrimaryKey; }
set { m_PrimaryKey = value; }
}
public Type ResourceType
{
get { return m_Type; }
}
public override string ToString()
{
return m_InternalId;
}
// Combines the precomputed id/provider hash with the requested type and dependency hash.
public int Hash(Type t)
{
return (m_HashCode * 31 + t.GetHashCode()) * 31 + DependencyHashCode;
}
public CompactLocation(ResourceLocationMap locator, string internalId, string providerId, object dependencyKey, object data, int depHash, string primaryKey, Type type)
{
m_Locator = locator;
m_InternalId = internalId;
m_ProviderId = providerId;
m_Dependency = dependencyKey;
m_Data = data;
m_HashCode = internalId.GetHashCode() * 31 + providerId.GetHashCode();
m_DependencyHashCode = depHash;
m_PrimaryKey = primaryKey;
// Untyped locations default to object.
m_Type = type == null ? typeof(object) : type;
}
}
/// <summary>
/// Deserializes a JSON catalog file from disk. cacheSize is accepted for signature
/// parity with the binary path but is unused by the JSON format.
/// </summary>
internal static ContentCatalogData LoadFromFile(string path, int cacheSize = 1024)
{
    var json = File.ReadAllText(path);
    return JsonUtility.FromJson(json);
}
/// <summary>
/// Serializes this catalog to JSON and writes it to the given path.
/// </summary>
internal void SaveToFile(string path)
{
    var json = JsonUtility.ToJson(this);
    File.WriteAllText(path, json);
}
// Clears all serialized payloads, leaving an empty catalog object.
internal void CleanData()
{
m_KeyDataString = "";
m_BucketDataString = "";
m_EntryDataString = "";
m_ExtraDataString = "";
m_InternalIds = null;
m_LocatorId = "";
m_ProviderIds = null;
m_ResourceProviderData = null;
m_resourceTypes = null;
}
// Creates a locator with an overridden id; providerSuffix is appended to provider ids.
internal ResourceLocationMap CreateCustomLocator(string overrideId = "", string providerSuffix = null)
{
m_LocatorId = overrideId;
return CreateLocator(providerSuffix);
}
/// <summary>
/// Create IResourceLocator object.
/// </summary>
/// <param name="providerSuffix">If specified, this value will be appended to all provider ids. This is used when loading additional catalogs that need to have unique providers.</param>
/// <returns>ResourceLocationMap, which implements the IResourceLocator interface.</returns>
public ResourceLocationMap CreateLocator(string providerSuffix = null)
{
//decode the bucket table: count, then per bucket a key-data offset and its entry indices
var bucketData = Convert.FromBase64String(m_BucketDataString);
int bucketCount = BitConverter.ToInt32(bucketData, 0);
var buckets = new Bucket[bucketCount];
int bi = 4;
for (int i = 0; i < bucketCount; i++)
{
var index = SerializationUtilities.ReadInt32FromByteArray(bucketData, bi);
bi += 4;
var entryCount = SerializationUtilities.ReadInt32FromByteArray(bucketData, bi);
bi += 4;
var entryArray = new int[entryCount];
for (int c = 0; c < entryCount; c++)
{
entryArray[c] = SerializationUtilities.ReadInt32FromByteArray(bucketData, bi);
bi += 4;
}
buckets[i] = new Bucket { entries = entryArray, dataOffset = index };
}
//suffix providers once so ids stay unique when multiple catalogs are loaded
if (!string.IsNullOrEmpty(providerSuffix))
{
for (int i = 0; i < m_ProviderIds.Length; i++)
{
if (!m_ProviderIds[i].EndsWith(providerSuffix, StringComparison.Ordinal))
m_ProviderIds[i] = m_ProviderIds[i] + providerSuffix;
}
}
//decode keys; each bucket's dataOffset points at its key's serialized object
var extraData = Convert.FromBase64String(m_ExtraDataString);
var keyData = Convert.FromBase64String(m_KeyDataString);
var keyCount = BitConverter.ToInt32(keyData, 0);
var keys = new object[keyCount];
for (int i = 0; i < buckets.Length; i++)
keys[i] = SerializationUtilities.ReadObjectFromByteArray(keyData, buckets[i].dataOffset);
var locator = new ResourceLocationMap(m_LocatorId, buckets.Length);
//decode entries: each is k_EntryDataItemPerEntry consecutive int32 fields
var entryData = Convert.FromBase64String(m_EntryDataString);
int count = SerializationUtilities.ReadInt32FromByteArray(entryData, 0);
var locations = new IResourceLocation[count];
for (int i = 0; i < count; i++)
{
var index = kBytesPerInt32 + i * (kBytesPerInt32 * k_EntryDataItemPerEntry);
var internalId = SerializationUtilities.ReadInt32FromByteArray(entryData, index);
index += kBytesPerInt32;
var providerIndex = SerializationUtilities.ReadInt32FromByteArray(entryData, index);
index += kBytesPerInt32;
var dependencyKeyIndex = SerializationUtilities.ReadInt32FromByteArray(entryData, index);
index += kBytesPerInt32;
var depHash = SerializationUtilities.ReadInt32FromByteArray(entryData, index);
index += kBytesPerInt32;
var dataIndex = SerializationUtilities.ReadInt32FromByteArray(entryData, index);
index += kBytesPerInt32;
var primaryKey = SerializationUtilities.ReadInt32FromByteArray(entryData, index);
index += kBytesPerInt32;
var resourceType = SerializationUtilities.ReadInt32FromByteArray(entryData, index);
//a negative data index means the entry carries no extra provider data
object data = dataIndex < 0 ? null : SerializationUtilities.ReadObjectFromByteArray(extraData, dataIndex);
locations[i] = new CompactLocation(locator, Addressables.ResolveInternalId(ExpandInternalId(m_InternalIdPrefixes, m_InternalIds[internalId])),
m_ProviderIds[providerIndex], dependencyKeyIndex < 0 ? null : keys[dependencyKeyIndex], data, depHash, keys[primaryKey].ToString(), m_resourceTypes[resourceType].Value);
}
//map each key to the locations referenced by its bucket
for (int i = 0; i < buckets.Length; i++)
{
var bucket = buckets[i];
var key = keys[i];
var locs = new IResourceLocation[bucket.entries.Length];
for (int b = 0; b < bucket.entries.Length; b++)
locs[b] = locations[bucket.entries[b]];
locator.Add(key, locs);
}
return locator;
}
// Reconstructs ContentCatalogDataEntry objects from the serialized form by building
// a locator and inverting its key -> locations mapping (one entry per distinct location).
internal IList GetData()
{
var loc = CreateLocator();
var res = new List();
var locsToKeys = new Dictionary>();
foreach (var k in loc.Keys)
{
loc.Locate(k, null, out var locs);
foreach (var l in locs)
{
if (!locsToKeys.TryGetValue(l, out var keys))
locsToKeys.Add(l, keys = new List());
keys.Add(k.ToString());
}
}
foreach (var k in locsToKeys)
{
res.Add(new ContentCatalogDataEntry(k.Key.ResourceType, k.Key.InternalId, k.Key.ProviderId, k.Value, k.Key.Dependencies == null ? null : k.Key.Dependencies.Select(d => d.PrimaryKey).ToList(), k.Key.Data));
}
return res;
}
/// <summary>
/// Expands a compacted internal id of the form "index#suffix" by substituting the
/// prefix at that index. Ids without a '#' or without a numeric prefix-index are
/// returned unchanged, as is everything when no prefix table exists.
/// </summary>
internal static string ExpandInternalId(string[] internalIdPrefixes, string v)
{
    // No prefix table means ids were stored verbatim.
    if (internalIdPrefixes == null || internalIdPrefixes.Length == 0)
        return v;

    int hashPos = v.LastIndexOf('#');
    if (hashPos < 0)
        return v;

    // The text before the '#' must be an index into the prefix table; otherwise the id is literal.
    if (!int.TryParse(v.Substring(0, hashPos), out int prefixIndex))
        return v;

    return internalIdPrefixes[prefixIndex] + v.Substring(hashPos + 1);
}
#if UNITY_EDITOR
// Assigns a stable, insertion-ordered index to each distinct key.
class KeyIndexer
{
public List values;
public Dictionary map;
public KeyIndexer(IEnumerable keyCollection, int capacity)
{
values = new List(capacity);
map = new Dictionary(capacity);
if (keyCollection != null)
Add(keyCollection);
}
public void Add(IEnumerable keyCollection)
{
bool isNew = false;
foreach (var key in keyCollection)
Add(key, ref isNew);
}
// Adds the key if unseen; sets isNew to true when added (never resets it to false).
public void Add(T key, ref bool isNew)
{
int index;
if (!map.TryGetValue(key, out index))
{
isNew = true;
map.Add(key, values.Count);
values.Add(key);
}
}
}
// Maps keys to computed values with first-wins semantics and insertion-ordered storage.
class KeyIndexer
{
public List values;
public Dictionary map;
public KeyIndexer(IEnumerable keyCollection, Func func, int capacity)
{
values = new List(capacity);
map = new Dictionary(capacity);
if (keyCollection != null)
Add(keyCollection, func);
}
void Add(IEnumerable keyCollection, Func func)
{
foreach (var key in keyCollection)
Add(key, func(key));
}
// Adds the key/value pair only when the key has not been seen before.
public void Add(TKey key, TVal val)
{
int index;
if (!map.TryGetValue(key, out index))
{
map.Add(key, values.Count);
values.Add(val);
}
}
public TVal this[TKey key]
{
get { return values[map[key]]; }
}
}
/// <summary>
/// Sets the catalog data before serialization.
/// </summary>
/// <param name="data">The list of catalog entries.</param>
/// <param name="optimizeSize">No longer used.</param>
[Obsolete("The optimizeSize parameter for SetData is no longer used.")]
public void SetData(IList data, bool optimizeSize)
{
}
/// <summary>
/// Sets the catalog data before serialization. Builds index tables for providers,
/// internal ids, keys and types, serializes per-entry provider data, collapses
/// multi-dependency lists into synthetic hash keys, then encodes the entry and
/// key/bucket tables as base64 strings.
/// </summary>
/// <param name="data">The list of catalog entries.</param>
public void SetData(IList data)
{
if (data == null)
return;
var providers = new KeyIndexer(data.Select(s => s.Provider), 10);
var internalIds = new KeyIndexer(data.Select(s => s.InternalId), data.Count);
var keys = new KeyIndexer(data.SelectMany(s => s.Keys), data.Count * 3);
var types = new KeyIndexer(data.Select(s => s.ResourceType), 50);
keys.Add(data.SelectMany(s => s.Dependencies));
var keyIndexToEntries = new KeyIndexer, object>(keys.values, s => new List(), keys.values.Count);
var entryToIndex = new Dictionary(data.Count);
var extraDataList = new List(8 * 1024);
var entryIndexToExtraDataIndex = new Dictionary();
int extraDataIndex = 0;
//create buckets of key to data entry
for (int i = 0; i < data.Count; i++)
{
var e = data[i];
//serialize per-entry provider data; -1 marks entries with no extra data
int extraDataOffset = -1;
if (e.Data != null)
{
var len = SerializationUtilities.WriteObjectToByteList(e.Data, extraDataList);
if (len > 0)
{
extraDataOffset = extraDataIndex;
extraDataIndex += len;
}
}
entryIndexToExtraDataIndex.Add(i, extraDataOffset);
entryToIndex.Add(e, i);
foreach (var k in e.Keys)
keyIndexToEntries[k].Add(e);
}
m_ExtraDataString = Convert.ToBase64String(extraDataList.ToArray());
//create extra entries for dependency sets
Dictionary hashSources = new Dictionary();
int originalEntryCount = data.Count;
for (int i = 0; i < originalEntryCount; i++)
{
var entry = data[i];
//entries with fewer than two dependencies keep their dependency key as-is
if (entry.Dependencies == null || entry.Dependencies.Count < 2)
continue;
var hashCode = CalculateCollectedHash(entry.Dependencies, hashSources).ToString();
bool isNew = false;
keys.Add(hashCode, ref isNew);
if (isNew)
{
//if this combination of dependencies is new, add a new entry and add its key to all contained entries
var deps = entry.Dependencies.Select(d => keyIndexToEntries[d][0]).ToList();
keyIndexToEntries.Add(hashCode, deps);
foreach (var dep in deps)
dep.Keys.Add(hashCode);
}
//reset the dependency list to only contain the key of the new set
entry.Dependencies.Clear();
entry.Dependencies.Add(hashCode);
}
//serialize internal ids and providers
m_InternalIds = internalIds.values.ToArray();
m_ProviderIds = providers.values.ToArray();
m_resourceTypes = types.values.Select(t => new SerializedType() { Value = t }).ToArray();
//serialize entries: a count then k_EntryDataItemPerEntry int32 fields per entry
{
var entryData = new byte[data.Count * (kBytesPerInt32 * k_EntryDataItemPerEntry) + kBytesPerInt32];
var entryDataOffset = SerializationUtilities.WriteInt32ToByteArray(entryData, data.Count, 0);
for (int i = 0; i < data.Count; i++)
{
var e = data[i];
entryDataOffset = SerializationUtilities.WriteInt32ToByteArray(entryData, internalIds.map[e.InternalId], entryDataOffset);
entryDataOffset = SerializationUtilities.WriteInt32ToByteArray(entryData, providers.map[e.Provider], entryDataOffset);
entryDataOffset = SerializationUtilities.WriteInt32ToByteArray(entryData, e.Dependencies.Count == 0 ? -1 : keyIndexToEntries.map[e.Dependencies[0]], entryDataOffset);
entryDataOffset = SerializationUtilities.WriteInt32ToByteArray(entryData, GetHashCodeForEnumerable(e.Dependencies), entryDataOffset);
entryDataOffset = SerializationUtilities.WriteInt32ToByteArray(entryData, entryIndexToExtraDataIndex[i], entryDataOffset);
entryDataOffset = SerializationUtilities.WriteInt32ToByteArray(entryData, keys.map[e.Keys.First()], entryDataOffset);
entryDataOffset = SerializationUtilities.WriteInt32ToByteArray(entryData, (ushort)types.map[e.ResourceType], entryDataOffset);
}
m_EntryDataString = Convert.ToBase64String(entryData);
}
//serialize keys and mappings
{
var entryCount = keyIndexToEntries.values.Aggregate(0, (a, s) => a += s.Count);
var bucketData = new byte[4 + keys.values.Count * 8 + entryCount * 4];
var keyData = new List(keys.values.Count * 100);
keyData.AddRange(BitConverter.GetBytes(keys.values.Count));
int keyDataOffset = 4;
int bucketDataOffset = SerializationUtilities.WriteInt32ToByteArray(bucketData, keys.values.Count, 0);
for (int i = 0; i < keys.values.Count; i++)
{
var key = keys.values[i];
bucketDataOffset = SerializationUtilities.WriteInt32ToByteArray(bucketData, keyDataOffset, bucketDataOffset);
keyDataOffset += SerializationUtilities.WriteObjectToByteList(key, keyData);
var entries = keyIndexToEntries[key];
bucketDataOffset = SerializationUtilities.WriteInt32ToByteArray(bucketData, entries.Count, bucketDataOffset);
foreach (var e in entries)
bucketDataOffset = SerializationUtilities.WriteInt32ToByteArray(bucketData, entryToIndex[e], bucketDataOffset);
}
m_BucketDataString = Convert.ToBase64String(bucketData);
m_KeyDataString = Convert.ToBase64String(keyData.ToArray());
}
}
// Computes a hash for a dependency set and guards against collisions: if the same
// hash was previously produced by a *different* set, this throws rather than emit
// a corrupt catalog.
internal int CalculateCollectedHash(List objects, Dictionary hashSources)
{
var hashSource = new HashSet(objects);
var hashCode = GetHashCodeForEnumerable(hashSource);
if (hashSources.TryGetValue(hashCode, out var previousHashSource))
{
if (!(previousHashSource is HashSet b) || !hashSource.SetEquals(b))
throw new Exception($"INCORRECT HASH: the same hash ({hashCode}) for different dependency lists:\nsource 1: {previousHashSource}\nsource 2: {hashSource}");
}
else
hashSources.Add(hashCode, hashSource);
return hashCode;
}
/// <summary>
/// Combines the hash codes of every element into one value using the conventional
/// multiply-by-31 fold. The result depends on enumeration order; an empty sequence
/// yields 0.
/// </summary>
internal static int GetHashCodeForEnumerable(IEnumerable set)
{
    var combined = 0;
    foreach (object element in set)
        combined = combined * 31 + element.GetHashCode();
    return combined;
}
#endif
#endif
#if ENABLE_BINARY_CATALOG
internal class ResourceLocator : IResourceLocator
{
// Fixed header written at the start of the binary catalog buffer.
public struct Header
{
// Must equal kMagic; used to validate that the buffer is a catalog.
public int magic;
public int version;
// Offset of the KeyData array.
public uint keysOffset;
// Offset of the locator id string.
public uint idOffset;
public uint instanceProvider;
public uint sceneProvider;
public uint initObjectsArray;
}
// Serialized pairing of a key with the offset of its location-offset set.
public struct KeyData
{
public uint keyNameOffset;
public uint locationSetOffset;
}
// Carries one entry plus the full entry list and key-to-index map so dependency
// locations can be written recursively during serialization.
internal class ContentCatalogDataEntrySerializationContext
{
public ContentCatalogDataEntry entry;
public Dictionary> keyToEntryIndices;
public IList allEntries;
}
// A location read lazily from the binary catalog. The nested Serializer writes
// ContentCatalogDataEntrySerializationContext objects and reads them back as locations.
internal class ResourceLocation : IResourceLocation
{
public class Serializer : BinaryStorageBuffer.ISerializationAdapter, BinaryStorageBuffer.ISerializationAdapter
{
// On-disk layout of a single location record.
public struct Data
{
public uint primaryKeyOffset;
public uint internalIdOffset;
public uint providerOffset;
public uint dependencySetOffset;
public int dependencyHashValue;
public uint extraDataOffset;
public uint typeId;
}
public IEnumerable Dependencies => null;
//read as location
public object Deserialize(BinaryStorageBuffer.Reader reader, Type t, uint offset)
{
return new ResourceLocation(reader, offset);
}
//write from data entry
public uint Serialize(BinaryStorageBuffer.Writer writer, object val)
{
var ec = val as ContentCatalogDataEntrySerializationContext;
var e = ec.entry;
//write the dependency location set first; uint.MaxValue marks "no dependencies"
uint depId = uint.MaxValue;
if (e.Dependencies != null && e.Dependencies.Count > 0)
{
var depIds = new HashSet();
foreach (var k in e.Dependencies)
foreach (var i in ec.keyToEntryIndices[k])
depIds.Add(writer.WriteObject(new ResourceLocator.ContentCatalogDataEntrySerializationContext { entry = ec.allEntries[i], allEntries = ec.allEntries, keyToEntryIndices = ec.keyToEntryIndices }, false));
depId = writer.Write(depIds.ToArray(), false);
}
//strings are written with a separator so common prefixes can be shared
var data = new Data
{
primaryKeyOffset = writer.WriteString(e.Keys[0] as string, '/'),
internalIdOffset = writer.WriteString(e.InternalId, '/'),
providerOffset = writer.WriteString(e.Provider, '.'),
dependencySetOffset = depId,
extraDataOffset = writer.WriteObject(e.Data, true),
typeId = writer.WriteObject(e.ResourceType, false)
};
return writer.Write(data);
}
}
public ResourceLocation(BinaryStorageBuffer.Reader r, uint id)
{
var d = r.ReadValue(id);
PrimaryKey = r.ReadString(d.primaryKeyOffset, '/', false);
InternalId = Addressables.ResolveInternalId(r.ReadString(d.internalIdOffset, '/', false));
Data = r.ReadObject(d.extraDataOffset, false);
ProviderId = r.ReadString(d.providerOffset, '.', true);
Dependencies = r.ReadObjectArray(d.dependencySetOffset, true);
//NOTE(review): the hash is taken from the set offset, not from Data.dependencyHashValue
//(which is never read) — confirm this is intended.
DependencyHashCode = (int)d.dependencySetOffset;
ResourceType = r.ReadObject(d.typeId);
}
public string PrimaryKey { private set; get; }
public string InternalId { private set; get; }
public object Data { private set; get; }
public string ProviderId { set; get; }
public IList Dependencies { private set; get; }
public int DependencyHashCode { private set; get; }
public bool HasDependencies => DependencyHashCode >= 0;
public Type ResourceType { private set; get; }
public int Hash(Type t) => (InternalId.GetHashCode() * 31 + t.GetHashCode()) * 31 + DependencyHashCode;
}
// Cache key combining a lookup key with an optional type filter; hash is precomputed.
struct CacheKey : IEquatable
{
public object key;
public Type type;
int hashCode;
public CacheKey(object o, Type t)
{
key = o;
type = t;
hashCode = type == null ? key.GetHashCode() : key.GetHashCode() * 31 + type.GetHashCode();
}
// NOTE(review): throws NullReferenceException when this.type is null but other.type is not —
// confirm lookups never compare a null-typed key against a typed one.
public bool Equals(CacheKey other) => key.Equals(other.key) && ((type == null && other.type == null) || type.Equals(other.type));
public override int GetHashCode() => hashCode;
}
// LRU cache of resolved Locate results, keyed by (key, type).
LRUCache> m_Cache;
// Maps each key object to the offset of its serialized location set.
Dictionary keyData;
BinaryStorageBuffer.Reader reader;
public string LocatorId { get; private set; }
public IEnumerable Keys => keyData.Keys;
//TODO: this is VERY expensive with this locator since it will expand the entire thing into memory and then throw most of it away.
public IEnumerable AllLocations
{
get
{
var allLocs = new HashSet();
foreach (var kvp in keyData)
{
if (Locate(kvp.Key, null, out var locs))
{
foreach (var l in locs)
allLocs.Add(l);
}
}
return allLocs;
}
}
// Suffix appended to provider ids at Locate time; null means no suffixing.
string providerSuffix;
/// <summary>
/// Builds the key -> location-set-offset table from the serialized header.
/// </summary>
internal ResourceLocator(string id, BinaryStorageBuffer.Reader reader, int cacheLimit, string providerSuffix)
{
    LocatorId = id;
    this.providerSuffix = providerSuffix;
    this.reader = reader;
    m_Cache = new LRUCache>(cacheLimit);
    keyData = new Dictionary();
    // The header lives at offset 0; it points at the key table.
    var header = reader.ReadValue(0);
    var keyDataArray = reader.ReadValueArray(header.keysOffset, false);
    // (An unused loop counter was removed here.)
    foreach (var k in keyDataArray)
    {
        var key = reader.ReadObject(k.keyNameOffset);
        keyData.Add(key, k.locationSetOffset);
    }
}
/// <summary>
/// Finds all locations for a key, optionally filtered to locations whose ResourceType
/// is assignable to <paramref name="type"/>. Results (including misses) are memoized
/// in the LRU cache. Returns true when at least one matching location exists.
/// </summary>
public bool Locate(object key, Type type, out IList locations)
{
    var cacheKey = new CacheKey(key, type);
    if (m_Cache.TryGet(cacheKey, out locations))
        return true;
    if (!keyData.TryGetValue(key, out var locationSetOffset))
    {
        locations = null;
        return false;
    }
    var locs = reader.ReadObjectArray(locationSetOffset);
    if (providerSuffix != null)
    {
        // Ordinal comparison: provider ids are identifiers, not linguistic text
        // (matches the JSON path's EndsWith usage).
        foreach (var l in locs)
        {
            if (!l.ProviderId.EndsWith(providerSuffix, StringComparison.Ordinal))
                l.ProviderId = l.ProviderId + providerSuffix;
        }
    }
    // No type filter requested: cache and return the full set.
    if (type == null || type == typeof(object))
    {
        locations = locs;
        m_Cache.TryAdd(cacheKey, locations);
        return true;
    }
    var validTypeCount = 0;
    foreach (var l in locs)
        if (type.IsAssignableFrom(l.ResourceType))
            validTypeCount++;
    if (validTypeCount == 0)
    {
        locations = null;
        m_Cache.TryAdd(cacheKey, locations);
        return false;
    }
    if (validTypeCount == locs.Length)
    {
        locations = locs;
        m_Cache.TryAdd(cacheKey, locations);
        return true;
    }
    // Mixed case: only some locations match the requested type.
    locations = new List();
    foreach (var l in locs)
    {
        if (type.IsAssignableFrom(l.ResourceType))
            locations.Add(l);
    }
    m_Cache.TryAdd(cacheKey, locations);
    // BUGFIX: the filtered list was previously cached but then overwritten with the
    // unfiltered array ("locations = locs;") before returning, so first-time callers
    // received locations of the wrong type while cached callers got the correct set.
    return true;
}
}
// Serializes AssetBundleRequestOptions into a compact record. The rarely-varying
// fields live in a separate Common struct so the storage buffer can dedupe them.
class AssetBundleRequestOptionsSerializationAdapter : BinaryStorageBuffer.ISerializationAdapter
{
struct SerializedData
{
//since this data is likely to be duplicated, save it separately to allow the serialization system to dedupe
public struct Common
{
public short timeout;
public byte redirectLimit;
public byte retryCount;
// Bit field: 1 = assetLoadMode, 2 = chunkedTransfer, 4 = useCrcForCachedBundle,
// 8 = useUnityWebRequestForLocalBundles, 16 = clearOtherCachedVersionsWhenLoaded.
public int flags;
public AssetLoadMode assetLoadMode
{
get => (flags & 1) == 1 ? AssetLoadMode.AllPackedAssetsAndDependencies : AssetLoadMode.RequestedAssetAndDependencies;
set => flags = (flags & ~1) | (int)value;
}
public bool chunkedTransfer
{
get => (flags & 2) == 2;
set => flags = (flags & ~2) | (int)(value ? 2 : 0);
}
public bool useCrcForCachedBundle
{
get => (flags & 4) == 4;
set => flags = (flags & ~4) | (int)(value ? 4 : 0);
}
public bool useUnityWebRequestForLocalBundles
{
get => (flags & 8) == 8;
set => flags = (flags & ~8) | (int)(value ? 8 : 0);
}
public bool clearOtherCachedVersionsWhenLoaded
{
get => (flags & 16) == 16;
set => flags = (flags & ~16) | (int)(value ? 16 : 0);
}
}
public uint hashId;
public uint bundleNameId;
public uint crc;
public uint bundleSize;
public uint commonId;
}
public IEnumerable Dependencies => null;
// Rebuilds AssetBundleRequestOptions from its serialized record; returns null for any other type.
public object Deserialize(BinaryStorageBuffer.Reader reader, Type type, uint offset)
{
if (type != typeof(AssetBundleRequestOptions))
return null;
var sd = reader.ReadValue(offset);
var com = reader.ReadValue(sd.commonId);
return new AssetBundleRequestOptions
{
Hash = reader.ReadValue(sd.hashId).ToString(),
BundleName = reader.ReadString(sd.bundleNameId, '_'),
Crc = sd.crc,
BundleSize = sd.bundleSize,
Timeout = com.timeout,
RetryCount = com.retryCount,
RedirectLimit = com.redirectLimit,
AssetLoadMode = com.assetLoadMode,
ChunkedTransfer = com.chunkedTransfer,
UseUnityWebRequestForLocalBundles = com.useUnityWebRequestForLocalBundles,
UseCrcForCachedBundle = com.useCrcForCachedBundle,
ClearOtherCachedVersionsWhenLoaded = com.clearOtherCachedVersionsWhenLoaded
};
}
// Writes the options; the hash string is stored as a parsed Hash128 value.
public uint Serialize(BinaryStorageBuffer.Writer writer, object obj)
{
var options = obj as AssetBundleRequestOptions;
var hash = Hash128.Parse(options.Hash);
var sd = new SerializedData
{
hashId = writer.Write(hash),
bundleNameId = writer.WriteString(options.BundleName, '_'),
crc = options.Crc,
bundleSize = (uint)options.BundleSize,
commonId = writer.Write(new SerializedData.Common
{
timeout = (short)options.Timeout,
redirectLimit = (byte)options.RedirectLimit,
retryCount = (byte)options.RetryCount,
assetLoadMode = options.AssetLoadMode,
chunkedTransfer = options.ChunkedTransfer,
clearOtherCachedVersionsWhenLoaded = options.ClearOtherCachedVersionsWhenLoaded,
useCrcForCachedBundle = options.UseCrcForCachedBundle,
useUnityWebRequestForLocalBundles = options.UseUnityWebRequestForLocalBundles
})
};
return writer.Write(sd);
}
}
#endif
}
}