#region Disclaimer / License

// Copyright (C) 2009, Kenneth Skovhede
// http://www.hexad.dk, opensource@hexad.dk
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
#endregion Disclaimer / License

using GeoAPI.Geometries;
using OSGeo.MapGuide.MaestroAPI.Commands;
using OSGeo.MapGuide.MaestroAPI.CoordinateSystem;
using OSGeo.MapGuide.MaestroAPI.Exceptions;
using OSGeo.MapGuide.MaestroAPI.Feature;
using OSGeo.MapGuide.MaestroAPI.Mapping;
using OSGeo.MapGuide.MaestroAPI.Schema;
using OSGeo.MapGuide.MaestroAPI.SchemaOverrides;
using OSGeo.MapGuide.MaestroAPI.Serialization;
using OSGeo.MapGuide.ObjectModels;
using OSGeo.MapGuide.ObjectModels.Common;
using OSGeo.MapGuide.ObjectModels.IO;
using OSGeo.MapGuide.ObjectModels.LayerDefinition;
using OSGeo.MapGuide.ObjectModels.LoadProcedure;
using OSGeo.MapGuide.ObjectModels.MapDefinition;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.IO;
using System.Text;
using ObjCommon = OSGeo.MapGuide.ObjectModels.Common;

namespace OSGeo.MapGuide.MaestroAPI
{
    ///
    /// Base class of all connection classes. Covers functionality encompassed by
    /// the MapGuide Geospatial Platform API (i.e. Feature Service and Resource Service)
    ///
    public abstract class PlatformConnectionBase
    {
        ///
        /// A list of cached serializers
        ///
        protected Hashtable m_serializers;

        ///
        /// The current XML validator
        ///
        protected XmlValidator m_validator;

        ///
        /// The path of Xsd schemas
        ///
        protected string m_schemasPath;

        ///
        /// A lookup table for Xsd Schemas
        ///
        protected Hashtable m_cachedSchemas;

        ///
        /// A flag indicating if Xsd validation is performed
        ///
        protected bool m_disableValidation = false;

        ///
        /// Initializes a new instance of the class.
        ///
        protected PlatformConnectionBase()
        {
            m_serializers = new Hashtable();
            m_validator = new XmlValidator();
            m_cachedSchemas = new Hashtable();
            m_schemasPath = Path.Combine(Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location), "Schemas"); //NOXLATE
        }

        #region Serialization plumbing

        ///
        /// Deserializes an object from a stream.
        ///
        /// The expected object type
        /// The stream containing the object
        /// The deserialized object
        public virtual T DeserializeObject<T>(Stream data) => (T)DeserializeObject(typeof(T), data);

        ///
        /// Deserializes an object from a stream.
        ///
        /// The expected object type
        /// The stream containing the object
        /// The deserialized object
        public virtual object DeserializeObject(Type type, Stream data)
        {
            //HACK: MGOS 2.2 outputs different capabilities xml (because it's actually the correct one!), so
            //without breaking support against 2.1 and older servers, we transform the xml to its pre-2.2 form
            if (type == typeof(ObjectModels.Capabilities.v1_0_0.FdoProviderCapabilities) && this.SiteVersion < new Version(2, 2))
            {
                StringBuilder sb = null;
                using (StreamReader reader = new StreamReader(data))
                {
                    sb = new StringBuilder(reader.ReadToEnd());
                }

                //Pre-2.2 the elements were suffixed with Collection, change the suffix to List
                sb.Replace("", ""); //NOXLATE
                sb.Replace("", ""); //NOXLATE
                sb.Replace("", ""); //NOXLATE
                sb.Replace("", ""); //NOXLATE
                sb.Replace("", ""); //NOXLATE
                sb.Replace("", ""); //NOXLATE

                byte[] bytes = Encoding.UTF8.GetBytes(sb.ToString());

                //Replace the original input stream
                data = new MemoryStream(bytes);
            }

            //Must copy stream, because we will be reading it twice :(
            //Once for validation, and once for deserialization
            System.IO.MemoryStream ms = new System.IO.MemoryStream();
            Utility.CopyStream(data, ms);
            ms.Position = 0;

#if DEBUG_LASTMESSAGE
            //Save us a copy for later investigation
            using (System.IO.FileStream fs = System.IO.File.Open("lastResponse.xml", System.IO.FileMode.Create, System.IO.FileAccess.ReadWrite, System.IO.FileShare.None)) //NOXLATE
                Utility.CopyStream(ms, fs);
            ms.Position = 0;
#endif

            //TODO: Find out why the "xs:include" doesn't work with validator
            //Validation is quite important, as we otherwise may end up injecting malicious code
            //	if (!m_disableValidation)
            //	{
            //		m_validator.Validate(ms, GetSchema(type));
            //		ms.Position = 0;
            //	}

            try
            {
                return GetSerializer(type).Deserialize(ms);
            }
            catch (Exception ex)
            {
                string s = ex.Message;
                throw;
            }
        }

        ///
        /// Serialize an object into a new memory stream.
        ///
        /// The object to serialize
        /// A MemoryStream with the serialized object
        public virtual System.IO.MemoryStream SerializeObject(object o)
        {
            MemoryStream ms = new MemoryStream();
            GetSerializer(o.GetType()).Serialize(new Utf8XmlWriter(ms), o);
            return Utility.RemoveUTF8BOM(ms);
        }

        ///
        /// Serializes an object into a stream
        ///
        /// The object to serialize
        /// The stream to serialize into
        public virtual void SerializeObject(object o, Stream stream)
        {
            //The Utf8 writer makes sure the Utf8 tag is in place + sets encoding to Utf8
            //This is needed because the server fails when rendering maps using non utf8 xml documents
            //And the XmlSerializer system in .Net does not have a method to set the encoding attribute
            //This does not remove the utf8 BOM marker :(
            //GetSerializer(o.GetType()).Serialize(new Utf8XmlWriter(stream), o);
            SerializeObject(o).WriteTo(stream);
        }

        ///
        /// Returns an XmlSerializer for the given type
        ///
        /// The object type to serialize
        /// An XmlSerializer for the given type
        virtual protected System.Xml.Serialization.XmlSerializer GetSerializer(Type type)
        {
            if (m_serializers[type] == null)
                m_serializers[type] = new System.Xml.Serialization.XmlSerializer(type);
            return (System.Xml.Serialization.XmlSerializer)m_serializers[type];
        }

        #endregion Serialization plumbing

        #region Validation

        ///
        /// Gets or sets a flag that indicates if the Xml resources are validated before leaving and entering the server.
        ///
        public bool DisableValidation
        {
            get { return m_disableValidation; }
            set { m_disableValidation = value; }
        }

        ///
        /// Validates the current server version against the highest tested version.
/// /// The version to validate protected virtual void ValidateVersion(SiteVersion version) => ValidateVersion(new Version(version.Version)); /// /// Validates the current server version against the highest tested version. /// /// The version to validate protected virtual void ValidateVersion(Version version) { if (version > this.MaxTestedVersion) throw new Exception("Untested with MapGuide Build > " + this.MaxTestedVersion.ToString()); //NOXLATE } #endregion Validation /// /// Gets the preview URL generator. /// /// The preview URL generator. Returns null if this connection does not support browser-based resource previews public virtual Resource.Preview.IResourcePreviewUrlGenerator GetPreviewUrlGenerator() { return null; } /// /// Gets the name of the provider of this implementation /// public abstract string ProviderName { get; } /// /// Gets a collection of name-value parameters required to create another copy /// of this connection via the /// /// public abstract NameValueCollection CloneParameters { get; } /// /// Gets the current SessionID. /// public abstract string SessionID { get; } /// /// Gets the interface of this connection /// /// protected abstract IServerConnection GetInterface(); /// /// Removes the version numbers from a providername /// /// The name of the provider, with or without version numbers /// The provider name without version numbers public virtual string RemoveVersionFromProviderName(string providername) => Utility.StripVersionFromProviderName(providername); /// /// Gets the Xsd schema for a given type. /// /// The type to get the schema for /// The schema for the given type virtual protected System.Xml.Schema.XmlSchema GetSchema(Type type) { if (m_cachedSchemas[type] == null) { System.Reflection.FieldInfo fi = type.GetField("SchemaName", System.Reflection.BindingFlags.Static | System.Reflection.BindingFlags.Public); //NOXLATE if (fi == null) throw new Exception(string.Format(Strings.ErrorTypeHasNoSchemaInfo, type)); string xsd = (string)fi.GetValue(null); using (System.IO.FileStream fs = System.IO.File.Open(System.IO.Path.Combine(m_schemasPath, xsd), System.IO.FileMode.Open, System.IO.FileAccess.Read, System.IO.FileShare.Read)) m_cachedSchemas.Add(type, System.Xml.Schema.XmlSchema.Read(fs, null)); } return (System.Xml.Schema.XmlSchema)m_cachedSchemas[type]; } /// /// Raised when a resource is added /// public event ResourceEventHandler ResourceAdded; /// /// Raised when a resource is deleted. Note if a folder is deleted, this will /// only be raised for the folder and not its children. Also note that this is /// raised on any move operations as the original source is for all intents and /// purposes, deleted. /// public event ResourceEventHandler ResourceDeleted; /// /// Raised when a resource is updated /// public event ResourceEventHandler ResourceUpdated; /// /// Raises the event /// /// protected void OnResourceAdded(string resId) => this.ResourceAdded?.Invoke(this, new ResourceEventArgs(resId)); /// /// Raises the event /// /// protected void OnResourceDeleted(string resId) => this.ResourceDeleted?.Invoke(this, new ResourceEventArgs(resId)); /// /// Raises the event /// /// protected void OnResourceUpdated(string resId) => this.ResourceUpdated?.Invoke(this, new ResourceEventArgs(resId)); /// /// Gets or sets the collection of cached schemas. Use the object type for key, and an XmlSchema instance for value. 
/// public virtual Hashtable CachedSchemas { get { return m_cachedSchemas; } set { m_cachedSchemas = value; } } ~PlatformConnectionBase() { Dispose(false); } /// /// Releases unmanaged and - optionally - managed resources /// public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { } /// /// Clones this instance. /// /// public abstract IServerConnection Clone(); /// /// Raised when a outbound request has been dispatched /// public event RequestEventHandler RequestDispatched; /// /// Called when [request dispatched]. /// /// The data. protected void OnRequestDispatched(string data) { var handler = this.RequestDispatched; if (handler != null) handler(this, new RequestEventArgs(data)); } #region Resource Service /// /// Returns raw data from the server a byte array /// /// The full resourceID to get data from /// Raw data from the given resource public abstract Stream GetResourceXmlData(string resourceID); /// /// Returns an object deserialized from server data. /// Uses the ResourceID to infer the object type. /// /// The full resourceID of the item to retrieve. /// A deserialized object. public virtual IResource GetResource(string resourceID) { var stream = GetResourceXmlData(resourceID); var rt = ResourceIdentifier.GetResourceTypeAsString(resourceID); IResource o = ObjectFactory.Deserialize(rt, stream); o.ResourceID = resourceID; return o; } /// /// Deletes the resource. /// /// The resourceid. public abstract void DeleteResource(string resourceID); /// /// Writes an object into a resourceID /// /// The resource to write into /// The resourcec to write public virtual void WriteResource(string resourceID, object resource) { System.IO.MemoryStream ms = SerializeObject(resource); ms.Position = 0; //Validate that our data is correctly formated /*if (!m_disableValidation) { m_validator.Validate(ms, GetSchema(resource.GetType())); ms.Position = 0; }*/ #if DEBUG_LASTMESSAGE using (System.IO.Stream s = System.IO.File.Open("lastSave.xml", System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.None)) Utility.CopyStream(ms, s); ms.Position = 0; #endif SetResourceXmlData(resourceID, ms); } /// /// Writes raw data into a resource. /// /// The resourceID to write into /// The stream containing the data to write. public virtual void SetResourceXmlData(string resourceID, System.IO.Stream stream) { SetResourceXmlData(resourceID, stream, null); int purged = PurgeCachedItemsOf(resourceID); #if DEBUG System.Diagnostics.Trace.TraceInformation($"{purged} cached items purged for {resourceID}"); //NOXLATE #endif } /// /// Gets a full list of resources in the permanent server repository (Library). /// This method returns the full catalog and should be used sparringly. /// /// A list of contained resources public virtual ResourceList GetRepositoryResources() => GetRepositoryResources(StringConstants.RootIdentifier, null, -1, true); /// /// Gets a list of resources in the permanent server repository (Library). /// This method limits folder recursion to the specified depth. /// /// The max depth to recurse. Use -1 for no limit. /// A list of contained resources public virtual ResourceList GetRepositoryResources(int depth) => GetRepositoryResources(StringConstants.RootIdentifier, null, depth, true); /// /// Gets a list of resources in the permanent server repository (Library). /// This method limits folder recursion to the specified depth. /// /// The folder from which to return items. 
Use null for "Library://" /// The max depth to recurse. Use -1 for no limit. /// A list of contained resources public virtual ResourceList GetRepositoryResources(string startingpoint, int depth) => GetRepositoryResources(startingpoint, null, depth, true); /// /// Gets a list of resources in the permanent server repository (Library). /// This method limits folder recursion to the specified depth. /// /// The folder from which to return items. Use null for "Library://" /// A list of contained resources public virtual ResourceList GetRepositoryResources(string startingpoint) => GetRepositoryResources(startingpoint, null, -1, true); /// /// Gets a list of resources in the permanent server repository (Library). /// This method limits folder recursion to the specified depth. /// /// The folder from which to return items. Use null for "Library://" /// The type of resource to look for. Basically this is the resource extension, like ".MapDefinition". Use null for all resources. /// A list of contained resources public virtual ResourceList GetRepositoryResources(string startingpoint, string type) => GetRepositoryResources(startingpoint, type, -1, true); /// /// Gets a list of resources in the permanent server repository (Library). /// This method limits folder recursion to the specified depth. /// /// The folder from which to return items. Use null for "Library://" /// The type of resource to look for. Basically this is the resource extension, like ".MapDefinition". Use null for all resources. /// The max depth to recurse. Use -1 for no limit. /// A list of contained resources public ResourceList GetRepositoryResources(string startingpoint, string type, int depth) => GetRepositoryResources(startingpoint, type, depth, true); /// /// Gets a list of resources in the permanent server repository (Library). /// /// The folder from which to return items. Use null for "Library://" /// The type of resource to look for. Basically this is the resource extension, like ".MapDefinition". Use null for all resources. /// The max depth to recurse. Use -1 for no limit. /// A flag indicating if the count of subfolders and resources should be calculated for leaf nodes /// A list of contained resources public abstract ResourceList GetRepositoryResources(string startingpoint, string type, int depth, bool computeChildren); /// /// Forces a timestamp update of the specified resource. This is akin to /// setting the resource's content using its existing content. /// /// public virtual void Touch(string resourceId) { if (!ResourceIdentifier.IsFolderResource(resourceId)) { SetResourceXmlData(resourceId, GetResourceXmlData(resourceId)); } } /// /// Returns a boolean indicating if a given resource exists /// /// The resource to look for /// True if the resource exists false otherwise. Also returns false on error. 
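        // Example: a minimal usage sketch of the existence check declared below (the connection
        // variable and resource id are hypothetical, not part of this class):
        //
        //   if (!conn.ResourceExists("Library://Samples/Maps/Sheboygan.MapDefinition"))
        //   {
        //       // the resource is missing (or the lookup failed); handle accordingly
        //   }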
public virtual bool ResourceExists(string resourceID) { try { string sourcefolder; if (resourceID.EndsWith("/")) //NOXLATE sourcefolder = resourceID.Substring(0, resourceID.Substring(0, resourceID.Length - 1).LastIndexOf("/") + 1); //NOXLATE else sourcefolder = resourceID.Substring(0, resourceID.LastIndexOf("/") + 1); //NOXLATE ResourceList lst = GetRepositoryResources(sourcefolder, 1); foreach (object o in lst.Items) if (o.GetType() == typeof(ResourceListResourceFolder) && ((ResourceListResourceFolder)o).ResourceId == resourceID) return true; else if (o.GetType() == typeof(ResourceListResourceDocument) && ((ResourceListResourceDocument)o).ResourceId == resourceID) return true; return false; } catch { return false; } } /// /// Updates all resource references inside an object. /// /// The object in which the resource references are to be updated /// The current resource path, the one updating from /// The new resource path, the one updating to /// True if the old and new resource path identifiers are folders, false otherwise public virtual void UpdateResourceReferences(object o, string oldresourcepath, string newresourcepath, bool folderupdates) => UpdateResourceReferences(o, oldresourcepath, newresourcepath, folderupdates, new Hashtable()); /// /// Updates all resource references inside an object. /// /// The object in which the resource references are to be updated /// The current resource path, the one updating from /// The new resource path, the one updating to /// True if the old and new resource path identifiers are folders, false otherwise /// A hashtable with objects previously visited. Used for recursion, leave as null when calling from outside the API. protected void UpdateResourceReferences(object o, string oldresourcepath, string newresourcepath, bool folderupdates, Hashtable visited) { if (o == null) return; if (visited == null) visited = new Hashtable(); //Prevent infinite recursion if (o as string == null && !o.GetType().IsPrimitive) { if (visited.ContainsKey(o)) return; else visited.Add(o, null); } if (folderupdates) { if (!oldresourcepath.EndsWith("/")) //NOXLATE oldresourcepath += "/"; //NOXLATE if (!newresourcepath.EndsWith("/")) //NOXLATE newresourcepath += "/"; //NOXLATE } //If the value is a document or fragment of a document, we still wan't to repoint it if (o as System.Xml.XmlDocument != null || o as System.Xml.XmlNode != null) { Queue lst = new Queue(); if (o as System.Xml.XmlDocument != null) { foreach (System.Xml.XmlNode n in (o as System.Xml.XmlDocument).ChildNodes) if (n.NodeType == System.Xml.XmlNodeType.Element) lst.Enqueue(n); } else lst.Enqueue(o as System.Xml.XmlNode); while (lst.Count > 0) { System.Xml.XmlNode n = lst.Dequeue(); foreach (System.Xml.XmlNode nx in n.ChildNodes) if (nx.NodeType == System.Xml.XmlNodeType.Element) lst.Enqueue(nx); //Anything not "ResourceId" is from the LoadProcedure if (n.Name == "ResourceId" || n.Name == "SpatialDataSourcesPath" || n.Name == "LayersPath" || n.Name == "RootPath" || n.Name == "MapsPath" || n.Name == "SymbolLibrariesPath") //NOXLATE { string current = n.InnerXml; if (folderupdates && current.StartsWith(oldresourcepath)) n.InnerXml = newresourcepath + current.Substring(oldresourcepath.Length); else if (current == oldresourcepath) n.InnerXml = newresourcepath; } foreach (System.Xml.XmlAttribute a in n.Attributes) { //Anything not "ResourceId" is from the LoadProcedure if (a.Name == "ResourceId" || n.Name == "SpatialDataSourcesPath" || n.Name == "LayersPath" || n.Name == "RootPath" || n.Name == "MapsPath" || n.Name 
== "SymbolLibrariesPath") //NOXLATE { string current = a.Value; if (folderupdates && current.StartsWith(oldresourcepath)) n.Value = newresourcepath + current.Substring(oldresourcepath.Length); else if (current == oldresourcepath) n.Value = newresourcepath; } } } //There can be no objects in an xml document or node, so just return immediately return; } //Try to find the object properties foreach (System.Reflection.PropertyInfo pi in o.GetType().GetProperties()) { //Only index free read-write properties are taken into account if (!pi.CanRead || !pi.CanWrite || pi.GetIndexParameters().Length != 0 || pi.GetValue(o, null) == null) continue; object v = pi.GetValue(o, null); if (v == null) continue; //If we are at a ResourceId property, update it as needed string str = v as string; IEnumerable enu = v as IEnumerable; if (str != null) { bool isResId = pi.Name == "ResourceId"; //NOXLATE if (!isResId) { //Search for attributes object[] xmlAttrs = pi.GetCustomAttributes(typeof(System.Xml.Serialization.XmlElementAttribute), false); if (xmlAttrs != null) { foreach (System.Xml.Serialization.XmlElementAttribute attr in xmlAttrs) { if (attr.Type == typeof(string) && attr.ElementName == "ResourceId") //NOXLATE { if (pi.Name == "ResourceId") //NOXLATE { isResId = true; break; } } } } } if (isResId) { string current = str; if (current != null) { if (folderupdates && current.StartsWith(oldresourcepath)) pi.SetValue(o, newresourcepath + current.Substring(oldresourcepath.Length), null); else if (current == oldresourcepath) pi.SetValue(o, newresourcepath, null); } } } else if (enu != null) { //Handle collections foreach (object ox in enu) { UpdateResourceReferences(ox, oldresourcepath, newresourcepath, folderupdates, visited); } } else { Type vt = v.GetType(); if (vt.IsArray) { //Handle arrays Array sourceArr = (Array)v; for (int i = 0; i < sourceArr.Length; i++) { UpdateResourceReferences(sourceArr.GetValue(i), oldresourcepath, newresourcepath, folderupdates, visited); } } else if (vt.IsClass) { //Handle subobjects UpdateResourceReferences(v, oldresourcepath, newresourcepath, folderupdates, visited); } } } } /// /// Moves a resource, and subsequently updates all resources pointing to the old resource path /// /// The current resource path, the one moving from /// The new resource path, the one moving to /// A callback delegate, being called for non progress reporting events. /// A callback delegate, being called for progress reporting events. 
/// public virtual bool MoveResourceWithReferences(string oldpath, string newpath, LengthyOperationCallBack callback, LengthyOperationProgressCallBack progress) { LengthyOperationProgressArgs la = new LengthyOperationProgressArgs(Strings.MovingResource, -1); if (progress != null) progress(this, la); if (la.Cancel) return false; MoveResource(oldpath, newpath, true); la.Progress = 100; if (progress != null) progress(this, la); if (la.Cancel) return false; la.Progress = -1; if (progress != null) progress(this, la); if (la.Cancel) return false; ArrayList items = new ArrayList(); Hashtable paths = new Hashtable(); //The old path does not exist, but luckily the call works anyway ResourceReferenceList rlf = EnumerateResourceReferences(oldpath); foreach (string s in rlf.ResourceId) if (!paths.ContainsKey(s)) { items.Add(new LengthyOperationCallbackArgs.LengthyOperationItem(s)); paths.Add(s, null); } la.Progress = 100; if (progress != null) progress(this, la); if (la.Cancel) return false; LengthyOperationCallbackArgs args = new LengthyOperationCallbackArgs((LengthyOperationCallbackArgs.LengthyOperationItem[])items.ToArray(typeof(LengthyOperationCallbackArgs.LengthyOperationItem))); if (callback != null) callback(this, args); if (args.Cancel) return false; if (args.Index > args.Items.Length) return true; if (args.Items.Length == 0) return true; do { LengthyOperationCallbackArgs.LengthyOperationItem item = args.Items[args.Index]; item.Status = LengthyOperationCallbackArgs.LengthyOperationItem.OperationStatus.Pending; if (callback != null) { callback(this, args); if (args.Cancel) return false; } try { System.Xml.XmlDocument d = new System.Xml.XmlDocument(); using (var ms = GetResourceXmlData(item.Itempath)) d.Load(ms); UpdateResourceReferences(d, oldpath, newpath, false); using (System.IO.MemoryStream ms = new System.IO.MemoryStream()) { d.Save(ms); ms.Position = 0; SetResourceXmlData(item.Itempath, ms); } item.Status = LengthyOperationCallbackArgs.LengthyOperationItem.OperationStatus.Success; } catch (Exception ex) { string s = ex.Message; item.Status = LengthyOperationCallbackArgs.LengthyOperationItem.OperationStatus.Failure; } if (callback != null) { callback(this, args); if (args.Cancel) return false; } args.Index++; } while (!args.Cancel && args.Index < args.Items.Length); return !args.Cancel; } /// /// Moves a folder, and subsequently updates all resources pointing to the old resource path /// /// The current folder path, the one moving from /// The new folder path, the one moving to /// A callback delegate, being called for non progress reporting events. /// A callback delegate, being called for progress reporting events. 
/// public virtual bool MoveFolderWithReferences(string oldpath, string newpath, LengthyOperationCallBack callback, LengthyOperationProgressCallBack progress) { oldpath = FixAndValidateFolderPath(oldpath); newpath = FixAndValidateFolderPath(newpath); LengthyOperationProgressArgs la = new LengthyOperationProgressArgs(Strings.ProgressMovingFolder, -1); if (progress != null) progress(this, la); if (la.Cancel) return false; MoveFolder(oldpath, newpath, true); la.Progress = 100; if (progress != null) progress(this, la); if (la.Cancel) return false; int pg = 0; la.Progress = 0; la.StatusMessage = Strings.ProgressFindingFolderRefs; if (progress != null) progress(this, la); if (la.Cancel) return false; ResourceList lst = GetRepositoryResources(newpath); Hashtable items = new Hashtable(); foreach (object o in lst.Items) { if (o.GetType() == typeof(ResourceListResourceDocument)) { //The old path does not exist, but we need to enumerate references at the old location string resource_oldpath = ((ResourceListResourceDocument)o).ResourceId; resource_oldpath = oldpath + resource_oldpath.Substring(newpath.Length); ResourceReferenceList rlf = EnumerateResourceReferences(resource_oldpath); foreach (string s in rlf.ResourceId) if (!items.Contains(s)) items.Add(s, new LengthyOperationCallbackArgs.LengthyOperationItem(s)); } pg++; la.Progress = Math.Max(Math.Min(99, (int)(((double)pg / (double)lst.Items.Count) * (double)100)), 0); if (progress != null) progress(this, la); if (la.Cancel) return false; } la.Progress = 100; if (progress != null) progress(this, la); if (la.Cancel) return false; LengthyOperationCallbackArgs.LengthyOperationItem[] vi = new LengthyOperationCallbackArgs.LengthyOperationItem[items.Values.Count]; items.Values.CopyTo(vi, 0); LengthyOperationCallbackArgs args = new LengthyOperationCallbackArgs(vi); if (callback != null) callback(this, args); if (args.Cancel) return false; if (args.Index > args.Items.Length) return true; if (args.Items.Length == 0) return true; do { LengthyOperationCallbackArgs.LengthyOperationItem item = args.Items[args.Index]; item.Status = LengthyOperationCallbackArgs.LengthyOperationItem.OperationStatus.Pending; if (callback != null) { callback(this, args); if (args.Cancel) return false; } try { System.Xml.XmlDocument d = new System.Xml.XmlDocument(); using (var ms = GetResourceXmlData(item.Itempath)) d.Load(ms); UpdateResourceReferences(d, oldpath, newpath, true); using (System.IO.MemoryStream ms = new System.IO.MemoryStream()) { d.Save(ms); ms.Position = 0; SetResourceXmlData(item.Itempath, ms); } item.Status = LengthyOperationCallbackArgs.LengthyOperationItem.OperationStatus.Success; } catch (Exception ex) { string s = ex.Message; item.Status = LengthyOperationCallbackArgs.LengthyOperationItem.OperationStatus.Failure; } if (callback != null) { callback(this, args); if (args.Cancel) return false; } args.Index++; } while (!args.Cancel && args.Index < args.Items.Length); return !args.Cancel; } /// /// Copies folder, and subsequently updates all resources within the folder to use the new folder path instead of the originating one. /// /// The current folder path, the one copying from /// The new folder path, the one copying to /// A callback delegate, being called for non progress reporting events. /// A callback delegate, being called for progress reporting events. 
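        // Example: a minimal usage sketch of the callback-driven folder copy declared below (the
        // connection variable and folder paths are hypothetical; MoveFolderWithReferences follows
        // the same pattern):
        //
        //   bool completed = conn.CopyFolderWithReferences(
        //       "Library://Projects/Old/", "Library://Projects/New/",
        //       (sender, args) => { /* inspect args.Items and args.Index; set args.Cancel to abort */ },
        //       (sender, args) => Console.WriteLine($"{args.StatusMessage} ({args.Progress}%)"));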
/// public bool CopyFolderWithReferences(string oldpath, string newpath, LengthyOperationCallBack callback, LengthyOperationProgressCallBack progress) { oldpath = FixAndValidateFolderPath(oldpath); newpath = FixAndValidateFolderPath(newpath); ResourceList lst = GetRepositoryResources(oldpath); LengthyOperationProgressArgs la = new LengthyOperationProgressArgs(Strings.ProgressCopyingFolder, -1); if (progress != null) progress(this, la); if (la.Cancel) return false; CopyFolder(oldpath, newpath, true); la.Progress = 100; if (progress != null) progress(this, la); if (la.Cancel) return false; la.Progress = 0; la.StatusMessage = Strings.ProgressFindingFolderRefs; int pg = 0; if (progress != null) progress(this, la); if (la.Cancel) return false; ArrayList items = new ArrayList(); Hashtable paths = new Hashtable(); foreach (object o in lst.Items) { if (o.GetType() == typeof(ResourceListResourceDocument)) { ResourceReferenceList rlf = EnumerateResourceReferences(((ResourceListResourceDocument)o).ResourceId); foreach (string s in rlf.ResourceId) if (s.StartsWith(oldpath)) { string dest = newpath + s.Substring(oldpath.Length); if (!paths.ContainsKey(dest)) { items.Add(new LengthyOperationCallbackArgs.LengthyOperationItem(dest)); paths.Add(dest, null); } } } pg++; la.Progress = Math.Max(Math.Min(99, (int)(((double)pg / (double)lst.Items.Count) * (double)100)), 0); if (progress != null) progress(this, la); if (la.Cancel) return false; } la.Progress = 100; if (progress != null) progress(this, la); if (la.Cancel) return false; LengthyOperationCallbackArgs args = new LengthyOperationCallbackArgs((LengthyOperationCallbackArgs.LengthyOperationItem[])items.ToArray(typeof(LengthyOperationCallbackArgs.LengthyOperationItem))); if (callback != null) callback(this, args); if (args.Cancel) return false; if (args.Index > args.Items.Length) return true; if (args.Items.Length == 0) return true; do { LengthyOperationCallbackArgs.LengthyOperationItem item = args.Items[args.Index]; item.Status = LengthyOperationCallbackArgs.LengthyOperationItem.OperationStatus.Pending; if (callback != null) { callback(this, args); if (args.Cancel) return false; } try { System.Xml.XmlDocument d = new System.Xml.XmlDocument(); using (var ms = GetResourceXmlData(item.Itempath)) d.Load(ms); UpdateResourceReferences(d, oldpath, newpath, true); using (System.IO.MemoryStream ms = new System.IO.MemoryStream()) { d.Save(ms); ms.Position = 0; SetResourceXmlData(item.Itempath, ms); } item.Status = LengthyOperationCallbackArgs.LengthyOperationItem.OperationStatus.Success; } catch (Exception ex) { string s = ex.Message; item.Status = LengthyOperationCallbackArgs.LengthyOperationItem.OperationStatus.Failure; } if (callback != null) { callback(this, args); if (args.Cancel) return false; } args.Index++; } while (!args.Cancel && args.Index < args.Items.Length); return !args.Cancel; } /// /// Validates the origin of the folder, and ensures the folder path has a trailing slash. 
/// /// The path to validate and fix /// The fixed path virtual protected string FixAndValidateFolderPath(string folderpath) { if (!folderpath.StartsWith(StringConstants.RootIdentifier) && !folderpath.StartsWith("Session:" + this.SessionID + "//")) //NOXLATE throw new Exception(Strings.ErrorInvalidResourceIdentifierType); if (!folderpath.EndsWith("/")) //NOXLATE folderpath += "/"; //NOXLATE return folderpath; } /// /// Creates a folder on the server /// /// The path of the folder to create public virtual void CreateFolder(string resourceID) { resourceID = FixAndValidateFolderPath(resourceID); SetResourceXmlData(resourceID, new MemoryStream()); } /// /// Returns a value indicating if a given folder exists /// /// The path of the folder /// True if the folder exists, false otherwise. Also returns false on error. public virtual bool HasFolder(string folderpath) { folderpath = FixAndValidateFolderPath(folderpath); try { ResourceList l = this.GetRepositoryResources(folderpath, 1); return true; } catch { return false; } } /// /// Enumereates all references to a given resource /// /// The resource to enumerate references for /// A list of resources that reference the given resourceID public abstract ResourceReferenceList EnumerateResourceReferences(string resourceid); /// /// Copies a resource from one location to another. This does not update any references. /// /// The current resource path, the one copying from /// The new resource path, the one copying to /// True if the copy can overwrite an existing resource, false otherwise public abstract void CopyResource(string oldpath, string newpath, bool overwrite); /// /// Copies a folder and all its content. This does not update any references. /// /// The current folder path, the one copying from /// The new folder path, the one copying to /// True if the copy can overwrite an existing folder, false otherwise public abstract void CopyFolder(string oldpath, string newpath, bool overwrite); /// /// Moves a resource from one location to another. This does not update any references. /// /// The current resource path, the one moving from /// The new resource path, the one moving to /// True if the move can overwrite an existing resource, false otherwise public abstract void MoveResource(string oldpath, string newpath, bool overwrite); /// /// Moves a folder and its content from one location to another. This does not update any references. /// /// The current folder path, the one moving from /// The new folder path, the one moving to /// True if the move can overwrite an existing folder, false otherwise public abstract void MoveFolder(string oldpath, string newpath, bool overwrite); /// /// Returns data from a resource as a memorystream /// /// The id of the resource to fetch data from /// The name of the associated data item /// A stream containing the references resource data public abstract Stream GetResourceData(string resourceID, string dataname); /// /// Uploads data to a resource /// /// The id of the resource to update /// The name of the data to update or create /// The type of data /// A stream containing the new content of the resource data public virtual void SetResourceData(string resourceid, string dataname, ResourceDataType datatype, Stream stream) => SetResourceData(resourceid, dataname, datatype, stream, null); /// /// Uploads data to a resource /// /// The id of the resource to update /// The name of the data to update or create /// The type of data /// A stream containing the new content of the resource data /// The callback. 
public abstract void SetResourceData(string resourceid, string dataname, ResourceDataType datatype, Stream stream, Utility.StreamCopyProgressDelegate callback); /// /// Removes all cached items associated with the given feature source /// /// /// protected int PurgeCachedItemsOf(string resourceId) { //All keys are encoded with the resource id at the beginning, //so hunt down all matching keys starting with our resource id //these will be queued for removal. var purgeFsd = new List(); foreach (var key in m_featureSchemaCache.Keys) { if (key.StartsWith(resourceId)) purgeFsd.Add(key); } var purgeCls = new List(); foreach (var key in m_classDefinitionCache.Keys) { if (key.StartsWith(resourceId)) purgeCls.Add(key); } int removed = 0; foreach (var key in purgeFsd) { if (m_featureSchemaCache.Remove(key)) removed++; } foreach (var key in purgeCls) { if (m_classDefinitionCache.Remove(key)) removed++; } return removed; } /// /// Saves the resource. /// /// The resource. public void SaveResource(IResource resource) { try { SaveResourceAs(resource, resource.ResourceID); } catch (Exception ex) { if (Utility.IsDbXmlError(ex)) ex.Data[Utility.XML_EXCEPTION_KEY] = resource.Serialize(); throw ex; } } /// /// Saves the resource with the specified resource ID /// /// The resource. /// The resourceid. public void SaveResourceAs(IResource resource, string resourceid) { try { var stream = ObjectFactory.Serialize(resource); SetResourceXmlData(resourceid, stream); } catch (Exception ex) { if (Utility.IsDbXmlError(ex)) ex.Data[Utility.XML_EXCEPTION_KEY] = resource.Serialize(); throw ex; } } /// /// Upload a MapGuide Package file to the server /// /// Name of the file to upload /// A callback argument used to display progress. May be null. public abstract void UploadPackage(string filename, Utility.StreamCopyProgressDelegate callback); /// /// Updates the repository. /// /// The resource id. /// The header. public abstract void UpdateRepository(string resourceId, ResourceFolderHeaderType header); /// /// Gets the folder or resource header. /// /// The resource id. /// public abstract object GetFolderOrResourceHeader(string resourceId); /// /// Sets the resource XML data. /// /// The resource id. /// The content. /// The header. public abstract void SetResourceXmlData(string resourceId, System.IO.Stream content, System.IO.Stream header); /// /// Gets the resource header. /// /// The resource ID. /// public virtual ResourceDocumentHeaderType GetResourceHeader(string resourceID) => (ResourceDocumentHeaderType)this.GetFolderOrResourceHeader(resourceID); /// /// Gets the folder header. /// /// The resource ID. /// public virtual ResourceFolderHeaderType GetFolderHeader(string resourceID) { if (resourceID.EndsWith("//")) { ResourceList lst = this.GetRepositoryResources(resourceID, 0); ResourceListResourceFolder fld = lst.Items[0] as ResourceListResourceFolder; return fld.ResourceFolderHeader; } else return (ResourceFolderHeaderType)this.GetFolderOrResourceHeader(resourceID); } /// /// Sets the folder header. /// /// The resource ID. /// The header. public virtual void SetFolderHeader(string resourceID, ResourceFolderHeaderType header) => SetFolderOrResourceHeader(resourceID, header); /// /// Sets the resource header. /// /// The resource ID. /// The header. public virtual void SetResourceHeader(string resourceID, ResourceDocumentHeaderType header) => SetFolderOrResourceHeader(resourceID, header); /// /// Sets the folder or resource header. /// /// The resource ID. /// The header. 
public virtual void SetFolderOrResourceHeader(string resourceID, object header) { if (header == null) throw new ArgumentNullException(nameof(header)); //NOXLATE ResourceSecurityType sec; if (header as ResourceFolderHeaderType != null) sec = (header as ResourceFolderHeaderType).Security; else if (header as ResourceDocumentHeaderType != null) sec = (header as ResourceDocumentHeaderType).Security; else throw new ArgumentException(Strings.ErrorInvalidResourceHeaderRootElement, nameof(header)); //NOXLATE if (sec.Users != null && sec.Users.User != null && sec.Users.User.Count == 0) sec.Users = null; if (sec.Groups != null && sec.Groups.Group != null && sec.Groups.Group.Count == 0) sec.Groups = null; if (resourceID.EndsWith("//")) //NOXLATE { if (header as ResourceFolderHeaderType == null) throw new Exception(string.Format(Strings.ErrorResourceMustBeUpdatedWithFolderHeader, resourceID)); UpdateRepository(resourceID, header as ResourceFolderHeaderType); } else this.SetResourceXmlData(resourceID, null, this.SerializeObject(header)); } /// /// Enumerates all unmanaged folders, meaning alias'ed folders /// /// The type of data to return /// A filter applied to the items /// True if the list should contains recursive results /// The path to retrieve the data from /// A list of unmanaged data public abstract UnmanagedDataList EnumerateUnmanagedData(string startpath, string filter, bool recursive, UnmanagedDataTypes type); #endregion Resource Service #region Feature Service /// /// Returns an installed provider, given the name of the provider /// /// The name of the provider /// The first matching provider or null public virtual FeatureProviderRegistryFeatureProvider GetFeatureProvider(string providername) { string pname = RemoveVersionFromProviderName(providername).ToLower(); foreach (FeatureProviderRegistryFeatureProvider p in this.FeatureProviders) if (RemoveVersionFromProviderName(p.Name).ToLower().Equals(pname.ToLower())) return p; return null; } /// /// Tests the connection. /// /// The featuresource. /// public abstract string TestConnection(string featuresource); /// /// Gets a list of installed feature providers /// public abstract FeatureProviderRegistryFeatureProvider[] FeatureProviders { get; } /// /// Returns the spatial info for a given featuresource /// /// The ID of the resource to query /// Query only active items /// A list of spatial contexts public abstract FdoSpatialContextList GetSpatialContextInfo(string resourceID, bool activeOnly); /// /// Gets the names of the identity properties from a feature /// /// The resourceID for the FeatureSource /// The classname of the feature, including schema /// A string array with the found identities public abstract string[] GetIdentityProperties(string resourceID, string classname); /// /// Describes the feature source. /// /// The resource ID. /// The schema. 
/// public abstract FeatureSchema DescribeFeatureSource(string resourceID, string schema); /// /// Describes the specified feature source restricted to only the specified schema and the specified class names /// /// /// /// /// public abstract FeatureSchema DescribeFeatureSourcePartial(string resourceID, string schema, string[] classNames); /// /// feature source description cache /// protected Dictionary m_featureSchemaCache = new Dictionary(); /// /// a class definition cache /// protected Dictionary m_classDefinitionCache = new Dictionary(); /// /// Calls the actual implementation of the DescribeFeatureSource API /// /// /// protected abstract FeatureSourceDescription DescribeFeatureSourceInternal(string resourceId); /// /// Gets the feature source description. /// /// The resource ID. /// public virtual FeatureSourceDescription DescribeFeatureSource(string resourceID) { #if DEBUG bool bFromCache = true; #endif if (!m_featureSchemaCache.ContainsKey(resourceID)) { #if DEBUG bFromCache = false; #endif var fsd = this.DescribeFeatureSourceInternal(resourceID); try { //Cache a clone of each class definition m_featureSchemaCache[resourceID] = FeatureSourceDescription.Clone(fsd); foreach (ClassDefinition cls in fsd.AllClasses) { string classCacheKey = resourceID + "!" + cls.QualifiedName; //NOXLATE m_classDefinitionCache[classCacheKey] = cls; } } catch { m_featureSchemaCache[resourceID] = null; } } #if DEBUG if (bFromCache) System.Diagnostics.Trace.TraceInformation($"Returning cached description for {resourceID}"); //NOXLATE #endif //Return a clone to ensure immutability of cached one return FeatureSourceDescription.Clone(m_featureSchemaCache[resourceID]); } /// /// Fetches the specified class definition /// /// /// /// /// protected abstract ClassDefinition GetClassDefinitionInternal(string resourceId, string schemaName, string className); /// /// Gets the class definition. /// /// The resource ID. /// The class name to look for. /// public virtual ClassDefinition GetClassDefinition(string resourceID, string className) { //NOTE: To prevent ambiguity, only class definitions queried with qualified //names are cached. Un-qualified ones will call directly into the implementing //GetClassDefinition API bool bQualified = className.Contains(":"); //NOXLATE string classCacheKey = resourceID + "!" + className; //NOXLATE ClassDefinition cls = null; bool bStoreInCache = true; #if DEBUG bool bFromCache = false; #endif //We don't interrogate the Feature Source Description cache because part of //caching a Feature Source Description is to cache all the classes within if (m_classDefinitionCache.ContainsKey(classCacheKey)) { cls = m_classDefinitionCache[classCacheKey]; bStoreInCache = false; #if DEBUG bFromCache = true; #endif } else { if (bQualified) { var tokens = className.Split(':'); //NOXLATE cls = GetClassDefinitionInternal(resourceID, tokens[0], tokens[1]); } else { cls = GetClassDefinitionInternal(resourceID, null, className); } } //Only class definitions queried with qualified names can be cached if (bStoreInCache && !bQualified) bStoreInCache = false; #if DEBUG if (bFromCache) System.Diagnostics.Trace.TraceInformation($"Returning cached class ({className}) for {resourceID}"); //NOXLATE #endif if (cls != null) { //Sanity check var key = resourceID + "!" 
+ cls.QualifiedName; //NOXLATE if (bStoreInCache && classCacheKey == key) { m_classDefinitionCache[classCacheKey] = cls; } //Return a clone of the cached object to ensure immutability of //the original return ClassDefinition.Clone(cls); } return null; } internal int CachedFeatureSources => m_featureSchemaCache.Count; internal int CachedClassDefinitions => m_classDefinitionCache.Count; /// /// Resets the feature source schema cache. /// public virtual void ResetFeatureSourceSchemaCache() { m_featureSchemaCache = new Dictionary(); m_classDefinitionCache = new Dictionary(); } /// /// Performs an aggregate query on all columns in the datasource /// /// The resourceID of the FeatureSource to query /// The schema name /// The filter to apply to the /// A FeatureSetReader with the aggregated values public virtual IReader AggregateQueryFeatureSource(string resourceID, string schema, string filter) => AggregateQueryFeatureSource(resourceID, schema, filter, (string[])null); /// /// Performs an aggregate query on columns in the datasource /// /// The resourceID of the FeatureSource to query /// The schema name /// The filter to apply to the /// The columns to aggregate /// A IFeatureReader with the aggregated values public abstract IReader AggregateQueryFeatureSource(string resourceID, string schema, string filter, string[] columns); /// /// Performs an aggregate query on computed resources /// /// The resourceID of the FeatureSource to query /// The schema name /// The filter to apply to the /// A collection of column name and aggregate functions /// A FeatureSetReader with the aggregated values public abstract IReader AggregateQueryFeatureSource(string resourceID, string schema, string filter, System.Collections.Specialized.NameValueCollection aggregateFunctions); /// /// Gets the spatial extent. /// /// The resource ID. /// The schema. /// The geometry. /// public virtual ObjCommon.IEnvelope GetSpatialExtent(string resourceID, string schema, string geometry) => GetSpatialExtent(resourceID, schema, geometry, null, false); /// /// Gets the spatial extent. /// /// The resource ID. /// The schema. /// The geometry. /// The filter. /// public virtual ObjCommon.IEnvelope GetSpatialExtent(string resourceID, string schema, string geometry, string filter) => GetSpatialExtent(resourceID, schema, geometry, filter, false); /// /// Gets the spatial extent. /// /// The resource ID. /// The schema. /// The geometry. /// if set to true [allow fallback to context information]. /// public virtual ObjCommon.IEnvelope GetSpatialExtent(string resourceID, string schema, string geometry, bool allowFallbackToContextInformation) => GetSpatialExtent(resourceID, schema, geometry, null, allowFallbackToContextInformation); /// /// Gets the spatial extent. /// /// The resource ID. /// The schema. /// The geometry. /// The filter. /// if set to true [allow fallback to context information]. 
/// Thrown if the geometric extent is null /// protected virtual ObjCommon.IEnvelope GetSpatialExtent(string resourceID, string schema, string geometry, string filter, bool allowFallbackToContextInformation) { Check.ArgumentNotEmpty(schema, nameof(schema)); Check.ArgumentNotEmpty(geometry, nameof(geometry)); try { var fun = new NameValueCollection(); fun.Add("EXTENT", $"SpatialExtents(\"{geometry}\")"); //NOXLATE IReader fsr = null; try { fsr = AggregateQueryFeatureSource(resourceID, schema, filter, fun); if (fsr.ReadNext()) { if (fsr.IsNull("EXTENT")) //NOXLATE throw new NullExtentException(); IGeometry geom = fsr["EXTENT"] as IGeometry; //NOXLATE if (geom == null) { throw new NullExtentException(); } else { var env = geom.EnvelopeInternal; return ObjectFactory.CreateEnvelope( env.MinX, env.MinY, env.MaxX, env.MaxY); } } else throw new Exception(string.Format(Strings.ErrorNoDataInResource, resourceID)); } finally { fsr?.Close(); fsr?.Dispose(); fsr = null; } } catch { if (allowFallbackToContextInformation) try { FdoSpatialContextList lst = this.GetSpatialContextInfo(resourceID, false); if (lst.SpatialContext != null && lst.SpatialContext.Count >= 1) { return ObjectFactory.CreateEnvelope( double.Parse(lst.SpatialContext[0].Extent.LowerLeftCoordinate.X, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture), double.Parse(lst.SpatialContext[0].Extent.LowerLeftCoordinate.Y, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture), double.Parse(lst.SpatialContext[0].Extent.UpperRightCoordinate.X, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture), double.Parse(lst.SpatialContext[0].Extent.UpperRightCoordinate.Y, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture) ); } } catch { } throw; } } /// /// Enumerates the data stores. /// /// Name of the provider. /// The partial conn string. /// public abstract DataStoreList EnumerateDataStores(string providerName, string partialConnString); /// /// Gets the schemas. /// /// The resource id. /// public abstract string[] GetSchemas(string resourceId); /// /// Gets the class names. /// /// The resource id. /// Name of the schema. /// public abstract string[] GetClassNames(string resourceId, string schemaName); /// /// Gets the long transactions for the specified feature source /// /// The feature source id /// If true, will only return active long transactions /// public abstract ILongTransactionList GetLongTransactions(string resourceId, bool activeOnly); /// /// Gets the schema mappings for the given FDO provider. 
These mappings form the basis for a custom configuration document /// for a feature source that supports configuration /// /// The FDO provider /// The connection string /// public abstract ConfigurationDocument GetSchemaMapping(string provider, string partialConnString); /// /// Executes a feature query on the specified feature source /// /// The Feature Source ID /// The feature class name /// The FDO filter string that determines what features will be returned /// A containing the results of the query public IFeatureReader QueryFeatureSource(string resourceID, string className, string filter) => QueryFeatureSource(resourceID, className, filter, null); /// /// Executes a feature query on the specified feature source /// /// The Feature Source ID /// The feature class name /// A containing the results of the query public IFeatureReader QueryFeatureSource(string resourceID, string className) => QueryFeatureSource(resourceID, className, null, null); /// /// Executes a feature query on the specified feature source /// /// The Feature Source ID /// The feature class name /// The FDO filter string that determines what features will be returned /// A list of properties that are to be returned in the query result /// A containing the results of the query public IFeatureReader QueryFeatureSource(string resourceID, string className, string filter, string[] propertyNames) => QueryFeatureSource(resourceID, className, filter, propertyNames, null); /// /// Executes a feature query on the specified feature source /// /// The Feature Source ID /// The feature class name /// The FDO filter string that determines what features will be returned /// A list of properties that are to be returned in the query result /// A list of name/value pairs that contain the alias (name) for an FDO expression (value) /// Limits the number of features returned in the reader. -1 for all features /// A containing the results of the query public virtual IFeatureReader QueryFeatureSource(string resourceID, string className, string filter, string[] propertyNames, NameValueCollection computedProperties, int limit) { var reader = this.QueryFeatureSource(resourceID, className, filter, propertyNames, computedProperties); if (limit < 0) return reader; else return new LimitingFeatureReader(reader, limit); } /// /// Executes a feature query on the specified feature source /// /// The Feature Source ID /// The feature class name /// The FDO filter string that determines what features will be returned /// A list of properties that are to be returned in the query result /// A list of name/value pairs that contain the alias (name) for an FDO expression (value) /// A containing the results of the query public abstract IFeatureReader QueryFeatureSource(string resourceID, string className, string filter, string[] propertyNames, NameValueCollection computedProperties); #endregion Feature Service #region Feature/Capability Discovery /// /// Gets the highest version the API is currently tested againts /// public virtual Version MaxTestedVersion => SiteVersions.GetVersion(KnownSiteVersions.MapGuideOS2_1); /// /// Gets the site version. /// /// The site version. public abstract Version SiteVersion { get; } /// /// Gets the custom property names. /// /// public abstract string[] GetCustomPropertyNames(); /// /// Gets the type of the custom property. /// /// The name. /// public abstract Type GetCustomPropertyType(string name); /// /// Sets the custom property. /// /// The name. /// The value. 
public abstract void SetCustomProperty(string name, object value); /// /// Gets the custom property. /// /// The name. /// public abstract object GetCustomProperty(string name); /// /// Creates the command. /// /// Type of the CMD. /// public virtual ICommand CreateCommand(int cmdType) { CommandType ct = (CommandType)cmdType; switch (ct) { default: return null; } } #endregion Feature/Capability Discovery #region runtime map /// /// Infers the meters per unit value from the specified coordinate system /// /// /// /// protected virtual double InferMPU(string csWkt, double units) { try { var cs = CoordinateSystemBase.Create(csWkt); return cs.MetersPerUnitX * units; } catch { return 1.0; } } private class DefaultCalculator : IMpuCalculator { private readonly PlatformConnectionBase _conn; public DefaultCalculator(PlatformConnectionBase conn) { _conn = conn; } public double Calculate(string csWkt, double units) { return _conn.InferMPU(csWkt, units); } } /// /// Gets the MPU calculator /// /// public virtual IMpuCalculator GetCalculator() => new DefaultCalculator(this); /// /// Creates the map group. /// /// The parent runtime map. The runtime map must have been created or opened from this same service instance /// The name. /// public virtual RuntimeMapGroup CreateMapGroup(RuntimeMap parent, string name) => new RuntimeMapGroup(parent, name); //TODO: Review when we decide to split the implementations /// /// Creates a new runtime map group /// /// The map. /// The group. /// public virtual RuntimeMapGroup CreateMapGroup(RuntimeMap parent, IBaseMapGroup group) => new RuntimeMapGroup(parent, group); //TODO: Review when we decide to split the implementations /// /// Creates a new runtime map group /// /// The parent runtime map. The runtime map must have been created or opened from this same service instance /// The group. /// public virtual RuntimeMapGroup CreateMapGroup(RuntimeMap parent, IMapLayerGroup group) => new RuntimeMapGroup(parent, group); //TODO: Review when we decide to split the implementations /// /// Creates a new runtime map layer from the specified Layer Definition /// /// The parent runtime map. The runtime map must have been created or opened from this same service instance /// The layer definition /// public virtual RuntimeMapLayer CreateMapLayer(RuntimeMap parent, ILayerDefinition ldf) => CreateMapLayer(parent, ldf, true); //TODO: Review when we decide to split the implementations /// /// Creates a new runtime map layer from the specified Layer Definition /// /// The parent runtime map. The runtime map must have been created or opened from this same service instance /// The layer definition /// /// public virtual RuntimeMapLayer CreateMapLayer(RuntimeMap parent, ILayerDefinition ldf, bool suppressErrors) => new RuntimeMapLayer(parent, ldf, suppressErrors); //TODO: Review when we decide to split the implementations /// /// Creates a new runtime map layer from the specified instance /// /// The parent runtime map. The runtime map must have been created or opened from this same service instance /// The map definition layer /// public RuntimeMapLayer CreateMapLayer(RuntimeMap parent, IBaseMapLayer source) => CreateMapLayer(parent, source, true); /// /// Creates a new runtime map layer from the specified instance /// /// The parent runtime map. 
        /// <summary>Creates the map group.</summary>
        /// <param name="parent">The parent runtime map. The runtime map must have been created or opened from this same service instance</param>
        /// <param name="name">The name.</param>
        public virtual RuntimeMapGroup CreateMapGroup(RuntimeMap parent, string name) => new RuntimeMapGroup(parent, name); //TODO: Review when we decide to split the implementations

        /// <summary>Creates a new runtime map group</summary>
        /// <param name="parent">The map.</param>
        /// <param name="group">The group.</param>
        public virtual RuntimeMapGroup CreateMapGroup(RuntimeMap parent, IBaseMapGroup group) => new RuntimeMapGroup(parent, group); //TODO: Review when we decide to split the implementations

        /// <summary>Creates a new runtime map group</summary>
        /// <param name="parent">The parent runtime map. The runtime map must have been created or opened from this same service instance</param>
        /// <param name="group">The group.</param>
        public virtual RuntimeMapGroup CreateMapGroup(RuntimeMap parent, IMapLayerGroup group) => new RuntimeMapGroup(parent, group); //TODO: Review when we decide to split the implementations

        /// <summary>Creates a new runtime map layer from the specified Layer Definition</summary>
        /// <param name="parent">The parent runtime map. The runtime map must have been created or opened from this same service instance</param>
        /// <param name="ldf">The layer definition</param>
        public virtual RuntimeMapLayer CreateMapLayer(RuntimeMap parent, ILayerDefinition ldf) => CreateMapLayer(parent, ldf, true); //TODO: Review when we decide to split the implementations

        /// <summary>Creates a new runtime map layer from the specified Layer Definition</summary>
        /// <param name="parent">The parent runtime map. The runtime map must have been created or opened from this same service instance</param>
        /// <param name="ldf">The layer definition</param>
        public virtual RuntimeMapLayer CreateMapLayer(RuntimeMap parent, ILayerDefinition ldf, bool suppressErrors) => new RuntimeMapLayer(parent, ldf, suppressErrors); //TODO: Review when we decide to split the implementations

        /// <summary>Creates a new runtime map layer from the specified <see cref="IBaseMapLayer"/> instance</summary>
        /// <param name="parent">The parent runtime map. The runtime map must have been created or opened from this same service instance</param>
        /// <param name="source">The map definition layer</param>
        public RuntimeMapLayer CreateMapLayer(RuntimeMap parent, IBaseMapLayer source) => CreateMapLayer(parent, source, true);

        /// <summary>Creates a new runtime map layer from the specified <see cref="IBaseMapLayer"/> instance</summary>
        /// <param name="parent">The parent runtime map. The runtime map must have been created or opened from this same service instance</param>
        /// <param name="source">The map definition layer</param>
        public RuntimeMapLayer CreateMapLayer(RuntimeMap parent, IBaseMapLayer source, bool suppressErrors)
        {
            ILayerDefinition layerDef = (ILayerDefinition)GetResource(source.ResourceId);
            var rtLayer = CreateMapLayer(parent, layerDef, suppressErrors);
            //These may not match, so set them here
            rtLayer.ExpandInLegend = source.ExpandInLegend;
            rtLayer.LegendLabel = source.LegendLabel;
            rtLayer.Name = source.Name;
            rtLayer.Selectable = source.Selectable;
            rtLayer.ShowInLegend = source.ShowInLegend;
            rtLayer.Visible = true;
            rtLayer.Type = RuntimeMapLayer.kBaseMap;
            return rtLayer;
        }

        /// <summary>Creates a new runtime map layer from the specified <see cref="IMapLayer"/> instance</summary>
        /// <param name="parent">The parent runtime map. The runtime map must have been created or opened from this same service instance</param>
        /// <param name="source">The map definition layer</param>
        public RuntimeMapLayer CreateMapLayer(RuntimeMap parent, IMapLayer source) => CreateMapLayer(parent, source, true); //Forward to the three-argument overload; calling the two-argument form here would recurse forever

        /// <summary>Creates a new runtime map layer from the specified <see cref="IMapLayer"/> instance</summary>
        /// <param name="parent">The parent runtime map. The runtime map must have been created or opened from this same service instance</param>
        /// <param name="source">The map definition layer</param>
        public RuntimeMapLayer CreateMapLayer(RuntimeMap parent, IMapLayer source, bool suppressErrors)
        {
            ILayerDefinition layerDef = (ILayerDefinition)GetResource(source.ResourceId);
            var rtLayer = CreateMapLayer(parent, layerDef, suppressErrors);
            //These may not match, so set them here
            rtLayer.ExpandInLegend = source.ExpandInLegend;
            rtLayer.LegendLabel = source.LegendLabel;
            rtLayer.Name = source.Name;
            rtLayer.Selectable = source.Selectable;
            rtLayer.ShowInLegend = source.ShowInLegend;
            rtLayer.Group = source.Group;
            rtLayer.Visible = source.Visible;
            return rtLayer;
        }
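        // --------------------------------------------------------------------------------
        // Usage sketch (added for illustration; not part of the original source): turning a
        // Layer Definition into a runtime layer for an existing runtime map ("map" below is
        // assumed to be a RuntimeMap created or opened through this connection). The resource
        // id is an assumed sample value, and attaching the layer via map.Layers.Add(...)
        // assumes the runtime map exposes its layer collection as a Layers property.
        //
        //   var ldf = (ILayerDefinition)conn.GetResource("Library://Samples/Parcels.LayerDefinition");
        //   RuntimeMapLayer rtLayer = conn.CreateMapLayer(map, ldf);
        //   rtLayer.Visible = true;
        //   map.Layers.Add(rtLayer);   // assumed collection API on RuntimeMap
        // --------------------------------------------------------------------------------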
        /// <summary>
        /// Creates a new runtime map instance from an existing map definition. Meters per unit
        /// is calculated from the Coordinate System WKT of the map definition.
        /// </summary>
        /// <remarks>
        /// Calculation of meters-per-unit may differ between implementations. This may have an adverse
        /// effect on things such as rendering and measuring depending on the underlying implementation
        ///
        /// If you are certain of the meters-per-unit value required, use the overloaded method that
        /// accepts a metersPerUnit parameter.
        /// </remarks>
        public RuntimeMap CreateMap(string runtimeMapResourceId, string baseMapDefinitionId) => CreateMap(runtimeMapResourceId, baseMapDefinitionId, true);

        /// <summary>
        /// Creates a new runtime map instance from an existing map definition. Meters per unit
        /// is calculated from the Coordinate System WKT of the map definition.
        /// </summary>
        /// <remarks>
        /// Calculation of meters-per-unit may differ between implementations. This may have an adverse
        /// effect on things such as rendering and measuring depending on the underlying implementation
        ///
        /// If you are certain of the meters-per-unit value required, use the overloaded method that
        /// accepts a metersPerUnit parameter.
        /// </remarks>
        public RuntimeMap CreateMap(string runtimeMapResourceId, string baseMapDefinitionId, bool suppressErrors)
        {
            var mdf = (IMapDefinition)GetResource(baseMapDefinitionId);
            double mpu = CsHelper.DefaultCalculator != null
                ? CsHelper.DefaultCalculator.Calculate(mdf.CoordinateSystem, 1.0)
                : InferMPU(mdf.CoordinateSystem, 1.0);
            return CreateMap(runtimeMapResourceId, mdf, mpu, suppressErrors);
        }

        /// <summary>Creates a new runtime map instance from an existing map definition</summary>
        public virtual RuntimeMap CreateMap(string runtimeMapResourceId, string baseMapDefinitionId, double metersPerUnit) => CreateMap(runtimeMapResourceId, baseMapDefinitionId, metersPerUnit, true);

        /// <summary>Creates a new runtime map instance from an existing map definition</summary>
        public virtual RuntimeMap CreateMap(string runtimeMapResourceId, string baseMapDefinitionId, double metersPerUnit, bool suppressErrors)
        {
            var mdf = (IMapDefinition)GetResource(baseMapDefinitionId);
            return CreateMap(runtimeMapResourceId, mdf, metersPerUnit, suppressErrors);
        }

        /// <summary>
        /// Creates a new runtime map instance from an existing map definition. The runtime map resource id is calculated from the
        /// current session id and the name component of the Map Definition resource id
        /// </summary>
        public RuntimeMap CreateMap(IMapDefinition mdf) => CreateMap(mdf, true);

        /// <summary>
        /// Creates a new runtime map instance from an existing map definition. The runtime map resource id is calculated from the
        /// current session id and the name component of the Map Definition resource id
        /// </summary>
        public RuntimeMap CreateMap(IMapDefinition mdf, bool suppressErrors)
        {
            var rid = new ResourceIdentifier(ResourceIdentifier.GetName(mdf.ResourceID), ResourceTypes.Map, this.SessionID);
            return CreateMap(rid.ToString(), mdf, suppressErrors);
        }

        /// <summary>
        /// Creates a new runtime map instance from an existing map definition. The runtime map resource id is calculated from the
        /// current session id and the name component of the Map Definition resource id
        /// </summary>
        /// <param name="mdf">The map definition.</param>
        /// <param name="metersPerUnit">The meters per unit.</param>
        public RuntimeMap CreateMap(IMapDefinition mdf, double metersPerUnit) => CreateMap(mdf, metersPerUnit, true);

        /// <summary>
        /// Creates a new runtime map instance from an existing map definition. The runtime map resource id is calculated from the
        /// current session id and the name component of the Map Definition resource id
        /// </summary>
        /// <param name="mdf">The map definition.</param>
        /// <param name="metersPerUnit">The meters per unit.</param>
        public RuntimeMap CreateMap(IMapDefinition mdf, double metersPerUnit, bool suppressErrors)
        {
            var rid = new ResourceIdentifier(ResourceIdentifier.GetName(mdf.ResourceID), ResourceTypes.Map, this.SessionID);
            return CreateMap(rid.ToString(), mdf, metersPerUnit, suppressErrors);
        }

        /// <summary>
        /// Creates a new runtime map instance from an existing map definition. Meters per unit
        /// is calculated from the Coordinate System WKT of the map definition.
        /// </summary>
        /// <remarks>
        /// Calculation of meters-per-unit may differ between implementations. This may have an adverse
        /// effect on things such as rendering and measuring depending on the underlying implementation
        ///
        /// If you are certain of the meters-per-unit value required, use the overloaded method that
        /// accepts a metersPerUnit parameter.
        /// </remarks>
        public RuntimeMap CreateMap(string runtimeMapResourceId, IMapDefinition mdf) => CreateMap(runtimeMapResourceId, mdf, true);
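        // --------------------------------------------------------------------------------
        // Usage sketch (added for illustration; not part of the original source): creating a
        // session-based runtime map directly from a Map Definition, letting the connection
        // derive the meters-per-unit value from the map's coordinate system WKT. The Map
        // Definition resource id and the explicit meters-per-unit value are assumed sample
        // values.
        //
        //   var mdf = (IMapDefinition)conn.GetResource("Library://Samples/Sheboygan.MapDefinition");
        //   RuntimeMap map = conn.CreateMap(mdf);              // runtime map id derived from the session id
        //   // or, if the meters-per-unit value is already known:
        //   // RuntimeMap map = conn.CreateMap(mdf, 111319.49);
        // --------------------------------------------------------------------------------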
        /// <summary>
        /// Creates a new runtime map instance from an existing map definition. Meters per unit
        /// is calculated from the Coordinate System WKT of the map definition.
        /// </summary>
        /// <remarks>
        /// Calculation of meters-per-unit may differ between implementations. This may have an adverse
        /// effect on things such as rendering and measuring depending on the underlying implementation
        ///
        /// If you are certain of the meters-per-unit value required, use the overloaded method that
        /// accepts a metersPerUnit parameter.
        /// </remarks>
        public RuntimeMap CreateMap(string runtimeMapResourceId, IMapDefinition mdf, bool suppressErrors)
        {
            double mpu = CsHelper.DefaultCalculator != null
                ? CsHelper.DefaultCalculator.Calculate(mdf.CoordinateSystem, 1.0)
                : InferMPU(mdf.CoordinateSystem, 1.0);
            return CreateMap(runtimeMapResourceId, mdf, mpu, suppressErrors);
        }

        /// <summary>Creates a new runtime map instance from an existing map definition</summary>
        public virtual RuntimeMap CreateMap(string runtimeMapResourceId, IMapDefinition mdf, double metersPerUnit) => CreateMap(runtimeMapResourceId, mdf, metersPerUnit, true);

        /// <summary>Creates a new runtime map instance from an existing map definition</summary>
        public virtual RuntimeMap CreateMap(string runtimeMapResourceId, IMapDefinition mdf, double metersPerUnit, bool suppressErrors)
        {
            var map = new RuntimeMap(GetInterface(), mdf, metersPerUnit, suppressErrors);
            map.ResourceID = runtimeMapResourceId;
            map.IsDirty = false;
            return map;
        }

        /// <summary>Opens the specified runtime map</summary>
        public virtual RuntimeMap OpenMap(string runtimeMapResourceId)
        {
            if (!runtimeMapResourceId.StartsWith("Session:") || !runtimeMapResourceId.EndsWith(".Map")) //NOXLATE
                throw new ArgumentException(Strings.ErrorRuntimeMapNotInSessionRepo);

            var map = new RuntimeMap(GetInterface());
            map.Deserialize(new MgBinaryDeserializer(this.GetResourceData(runtimeMapResourceId, "RuntimeData"), this.SiteVersion)); //NOXLATE
            if (this.SiteVersion >= SiteVersions.GetVersion(KnownSiteVersions.MapGuideOS1_2))
                map.DeserializeLayerData(new MgBinaryDeserializer(this.GetResourceData(runtimeMapResourceId, "LayerGroupData"), this.SiteVersion)); //NOXLATE
            map.IsDirty = false;
            return map;
        }

        #endregion runtime map

        #region Load Procedure

        /// <summary>Executes the load procedure.</summary>
        /// <param name="loadProc">The load proc.</param>
        /// <param name="callback">The callback.</param>
        /// <param name="ignoreUnsupported">if set to <c>true</c> [ignore unsupported].</param>
        public virtual string[] ExecuteLoadProcedure(ILoadProcedure loadProc, OSGeo.MapGuide.MaestroAPI.LengthyOperationProgressCallBack callback, bool ignoreUnsupported)
        {
            var cmd = new ExecuteLoadProcedure(GetInterface());
            cmd.IgnoreUnsupportedFeatures = ignoreUnsupported;
            return cmd.Execute(loadProc, callback);
        }

        /// <summary>Executes the load procedure.</summary>
        /// <param name="resourceID">The resource ID.</param>
        /// <param name="callback">The callback.</param>
        /// <param name="ignoreUnsupported">if set to <c>true</c> [ignore unsupported].</param>
        public virtual string[] ExecuteLoadProcedure(string resourceID, OSGeo.MapGuide.MaestroAPI.LengthyOperationProgressCallBack callback, bool ignoreUnsupported)
        {
            var cmd = new ExecuteLoadProcedure(GetInterface());
            cmd.IgnoreUnsupportedFeatures = ignoreUnsupported;
            return cmd.Execute(resourceID, callback);
        }

        #endregion Load Procedure
    }
}
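// --------------------------------------------------------------------------------
// Usage sketch (added for illustration; not part of the original source): re-opening a
// session-hosted runtime map and running a load procedure through the same connection.
// The resource ids are assumed sample values, and passing null for the progress
// callback is an assumption; supply a LengthyOperationProgressCallBack to observe
// progress.
//
//   RuntimeMap map = conn.OpenMap("Session:" + conn.SessionID + "//Sheboygan.Map");
//   string[] created = conn.ExecuteLoadProcedure(
//       "Library://Samples/Load/Parcels.LoadProcedure",  // assumed sample Load Procedure id
//       null,                                            // no progress callback (assumed acceptable)
//       true);                                           // ignore unsupported features
// --------------------------------------------------------------------------------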