diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..412eeda --- /dev/null +++ b/.gitattributes @@ -0,0 +1,22 @@ +# Auto detect text files and perform LF normalization +* text=auto + +# Custom for Visual Studio +*.cs diff=csharp +*.sln merge=union +*.csproj merge=union +*.vbproj merge=union +*.fsproj merge=union +*.dbproj merge=union + +# Standard to msysgit +*.doc diff=astextplain +*.DOC diff=astextplain +*.docx diff=astextplain +*.DOCX diff=astextplain +*.dot diff=astextplain +*.DOT diff=astextplain +*.pdf diff=astextplain +*.PDF diff=astextplain +*.rtf diff=astextplain +*.RTF diff=astextplain diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b9d6bd9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,215 @@ +################# +## Eclipse +################# + +*.pydevproject +.project +.metadata +bin/ +tmp/ +*.tmp +*.bak +*.swp +*~.nib +local.properties +.classpath +.settings/ +.loadpath + +# External tool builders +.externalToolBuilders/ + +# Locally stored "Eclipse launch configurations" +*.launch + +# CDT-specific +.cproject + +# PDT-specific +.buildpath + + +################# +## Visual Studio +################# + +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. + +# User-specific files +*.suo +*.user +*.sln.docstates + +# Build results + +[Dd]ebug/ +[Rr]elease/ +x64/ +build/ +[Bb]in/ +[Oo]bj/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +*_i.c +*_p.c +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.log +*.scc + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opensdf +*.sdf +*.cachefile + +# Visual Studio profiler +*.psess +*.vsp +*.vspx + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# NCrunch +*.ncrunch* +.*crunch*.local.xml + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.Publish.xml +*.pubxml + +# NuGet Packages Directory +## TODO: If you have NuGet Package Restore enabled, uncomment the next line +#packages/ + +# Windows Azure Build Output +csx +*.build.csdef + +# Windows Store app package directory +AppPackages/ + +# Others +sql/ +*.Cache +ClientBin/ +[Ss]tyle[Cc]op.* +~$* +*~ +*.dbmdl +*.[Pp]ublish.xml +*.pfx +*.publishsettings + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file to a newer +# Visual Studio version. 
Backup files are not needed, because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm + +# SQL Server files +App_Data/*.mdf +App_Data/*.ldf + +############# +## Windows detritus +############# + +# Windows image file caches +Thumbs.db +ehthumbs.db + +# Folder config file +Desktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Mac crap +.DS_Store + + +############# +## Python +############# + +*.py[co] + +# Packages +*.egg +*.egg-info +dist/ +build/ +eggs/ +parts/ +var/ +sdist/ +develop-eggs/ +.installed.cfg + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.tox + +#Translations +*.mo + +#Mr Developer +.mr.developer.cfg diff --git a/.vs/config/applicationhost.config b/.vs/config/applicationhost.config new file mode 100644 index 0000000..74295d5 --- /dev/null +++ b/.vs/config/applicationhost.config @@ -0,0 +1,1038 @@ + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/XMLValidatorWeb.jmconfig b/XMLValidatorWeb.jmconfig new file mode 100644 index 0000000..10eea63 --- /dev/null +++ b/XMLValidatorWeb.jmconfig @@ -0,0 +1 @@ +false \ No newline at end of file diff --git a/XMLValidatorWeb.sln b/XMLValidatorWeb.sln new file mode 100644 index 0000000..a252c91 --- /dev/null +++ b/XMLValidatorWeb.sln @@ -0,0 +1,20 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 2012 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "XMLValidatorWeb", "XMLValidatorWeb\XMLValidatorWeb.csproj", "{D9E0A2E6-768A-4377-AF62-675D156F8813}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {D9E0A2E6-768A-4377-AF62-675D156F8813}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D9E0A2E6-768A-4377-AF62-675D156F8813}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D9E0A2E6-768A-4377-AF62-675D156F8813}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D9E0A2E6-768A-4377-AF62-675D156F8813}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLBasics.cs b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLBasics.cs new file mode 100644 index 0000000..79f43d6 --- /dev/null +++ b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLBasics.cs @@ -0,0 +1,198 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; +using System.Xml; +using VectorMath; + +namespace DOEgbXML +{ + public class DOEgbXMLBasics + { + public static bool SliversAllowed 
= true; + + public enum MeasurementUnits + { + cubicft, + sqft, + ft, + spaces, + levels, + } + + public class Tolerances + { + public const double ToleranceDefault = -999; + public const double VolumePercentageTolerance = 0.05; //percentage (as in 0.05 = 5%) + public const double AreaPercentageTolerance = 0.05; //percentage (as in 0.05 = 5%) + public const double AreaTolerance = 1; //absolute tolerance in square feet + public const double crossProductTolerance = 0.01; //degrees + + //Level (aka - story) height difference tolerance in feet + public const double LevelHeightTolerance = 0.1; + public const double VectorAngleTolerance = 2.5; + public const double SpaceAreaTolerance = 1; + //all count tolerances + public const int SpaceCountTolerance = 0; + public const int LevelCountTolerance = 0; + public const double SurfaceCountTolerance = 0; + public const double ExteriorWallCountTolerance = 0; + public const double InteriorWallCountTolerance = 0; + public const double InteriorFloorCountTolerance = 0; + public const double RoofCountTolerance = 0; + public const double AirWallCountTolerance = 0; + public const double OpeningCountTolerance = 0; + public const double FixedWindowCountTolerance = 0; + public const double OperableWindowCountTolerance = 0; + public const double FixedSkylightCountTolerance = 0; + public const double OperableSkylightCountTolerance = 0; + public const double SlidingDoorCountTolerance = 0; + public const double NonSlidingDoorCountTolerance = 0; + public const double AirOpeningCountTolerance = 0; + + //surface tolerances + public const double SurfaceHeightTolerance = 0.5; //feet + public const double SurfaceWidthTolerance = 0.5; //feet + public const double SurfaceTiltTolerance = 2.5; // degrees + public const double SurfaceAzimuthTolerance = 2.5; //degrees + public const double SurfaceInsPtXTolerance = 0.5; //feet + public const double SurfaceInsPtYTolerance = 0.5; //feet + public const double SurfaceInsPtZTolerance = 0.5; //feet + public const double SurfacePLCoordTolerance = 1.0; //feet (12 inches) + public const double SliverDimensionTolerance = 0.25; //feet + public const double SurfaceAreaPercentageTolerance = 0.025; + + //opening tolerances + public const double OpeningHeightTolerance = 0.5; //feet + public const double OpeningWidthTolerance = 0.5; //feet + public const double OpeningSurfaceInsPtXTolerance = 0.5; //feet + public const double OpeningSurfaceInsPtYTolerance = 0.5; //feet + public const double OpeningSurfaceInsPtZTolerance = 0.5; //feet + public const double OpeningAreaPercentageTolerance = 0.025; + } + + public class Conversions + { + //the idea is, it searches through the document and finds items to switch out + //it is called when needed by a user or programmer + public XmlDocument ConvertFtToMeter(XmlDocument origdoc, XmlNamespaceManager gbXMLns1) + { + //number of feet in a meter + double convrate = 3.280839895; + + XmlDocument retdoc = (XmlDocument)origdoc.Clone(); + //by default, I will always change these nodes because they are the minimum that must be presented + + + //surface polyloop + //surface lower left hand corner + //building storeys + XmlNodeList nodes = retdoc.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:BuildingStorey", gbXMLns1); + if (nodes.Count > 0) + { + foreach (XmlNode Node in nodes) + { + XmlNodeList childnodes = Node.ChildNodes; + foreach (XmlNode childnode in childnodes) + { + if (childnode.Name == "Level") + { + childnode.Value = Convert.ToString(Convert.ToDouble(childnode.Value) / convrate); + } 
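+                                // Conversion note: convrate holds the number of feet in one metre, so dividing a
+                                // value expressed in feet by convrate yields metres (e.g. 9.84252 ft / 3.280839895 ≈ 3.0 m).
+                                // The same division is applied to every Coordinate value handled below.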
+ else if (childnode.Name == "PlanarGeometry") + { + //change the planar geometry + foreach (XmlNode PolyLoops in childnode) + { + //gathers all the cartesian points in a given polyloop + foreach (XmlNode cartesianPoints in PolyLoops) + { + foreach (XmlNode coordinate in cartesianPoints) + { + if (coordinate.Name == "Coordinate") + { + coordinate.Value = Convert.ToString(Convert.ToDouble(coordinate.Value) / convrate); + } + else + { + //this is bad, should terminate somehow + } + } + } + } + } + } + } + } + //space planar geometry + //space shell geometry + //space space boundaries + XmlNodeList spacenodes = retdoc.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Spaces", gbXMLns1); + if (nodes.Count > 0) + { + foreach (XmlNode Node in nodes) + { + XmlNodeList childnodes = Node.ChildNodes; + foreach (XmlNode childnode in childnodes) + { + if (childnode.Name == "PlanarGeometry") + { + //change the planar geometry + foreach (XmlNode PolyLoops in childnode) + { + //gathers all the cartesian points in a given polyloop + foreach (XmlNode cartesianPoints in PolyLoops) + { + foreach (XmlNode coordinate in cartesianPoints) + { + if (coordinate.Name == "Coordinate") + { + coordinate.Value = Convert.ToString(Convert.ToDouble(coordinate.Value) / convrate); + } + else + { + //this is bad, should terminate somehow + } + } + } + } + } + else if (childnode.Name == "ShellGeometry") + { + //this should always be the ClosedShell element + XmlNode closedShell = childnode.FirstChild; + foreach (XmlNode PolyLoops in childnode) + { + //gathers all the cartesian points in a given polyloop + foreach (XmlNode cartesianPoints in PolyLoops) + { + foreach (XmlNode coordinate in cartesianPoints) + { + if (coordinate.Name == "Coordinate") + { + coordinate.Value = Convert.ToString(Convert.ToDouble(coordinate.Value) / convrate); + } + else + { + //this is bad, should terminate somehow + } + } + } + } + } + } + } + } + return retdoc; + } + } + + public class EdgeFamily + { + public List startendpt; + public string sbdec; + public List relatedEdges; + } + + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLParser.cs b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLParser.cs new file mode 100644 index 0000000..47e06f4 --- /dev/null +++ b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLParser.cs @@ -0,0 +1,10067 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Xml; +using System.Xml.XPath; +using System.Text.RegularExpressions; +using VectorMath; +using System.Web; +using DOEgbXML; +using XMLValidatorWeb.SupportFiles; +using UnitConversions; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using log4net; +using log4net.Appender; + +namespace DOEgbXML +{ + class OpeningDefinitions + { + //creates instances of an object that store information about surfaces in a gbXML file + public string OpeningType; + public string OpeningId; + public string ParentSurfaceId; + public double ParentAzimuth; + public double ParentTilt; + public double Azimuth; + public double Tilt; + public double Height; + public double Width; + public double surfaceArea; + public Vector.MemorySafe_CartCoord InsertionPoint; + public List PlCoords; + public Vector.MemorySafe_CartVect PlRHRVector; + } + + class XMLParser + { + + //this is the output string + private static readonly ILog logger = + LogManager.GetLogger(typeof(XMLParser)); + public string output; + public string browserjson; + public string log; + public string table; + bool 
overallPassTest = true; + DOEgbXMLTestCriteriaObject TestCriteria; + DOEgbXMLTestDetail TestDetail; + gbXMLMatches globalMatchObject; + string TestToRun; + XmlDocument gbXMLStandardFile; + XmlDocument gbXMLTestFile; + public string summaryTable; + public List ReportList; + + //a List of strings with all the test files that I want to test + //eventually this will be a dynamically created list based on what has been uploaded. + //this list should be the list of all the test + + static Dictionary filepaths = new Dictionary() + { + //{"Test1" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Test Case 1 - Standard File.xml")}, + //{"Test2" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Test Case 2 - Standard File.xml")}, + {"Test3" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Test Case 3 - Standard File 6-0-1.xml")}, + //{"Test4" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Test Case 4 - Standard File.xml")}, + //{"Test5" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Test Case 5 - Standard File.xml")}, + {"Test6" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Test Case 6 - Standard File 6-0-1.xml")}, + {"Test7" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Test Case 7 - Standard File 6-0-1.xml")}, + {"Test8" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Test Case 8 - Standard File 6-0-1.xml")}, + {"Test12" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Test Case 12 - Standard File 6-0-1.xml")}, + //{"Test25" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Test Case 25 - Standard File.xml")}, + //{"Test28" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Test Case 28 - Standard File.xml")}, + {"Whole Building Test 1" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Whole Building Test Case 1 - Standard File.xml")}, + //{"Whole Building Test 2" ,Path.Combine(HttpRuntime.AppDomainAppPath,"SupportFiles/TestFiles/Whole Building Test Case 2 - Standard File.xml")} + }; + + + //Shell Geometry RHR Dictionaries + static Dictionary> TFShellGeomRHRes = new Dictionary>(); + static Dictionary> SFShellGeomRHRes = new Dictionary>(); + + //minimum number of points to define a plane + static int minPlanePoints = 3; + //value to hold, starting arbitrarily large + static double StoryHeightMin = 100.0; + + #region Test Report + public void StartTest(XmlReader xmldoc, string testToRun, ref gbXMLReport gbr, string username = "The Donald") + { + + log4net.Config.XmlConfigurator.Configure(); + + TestToRun = testToRun; + globalMatchObject = new gbXMLMatches(); + globalMatchObject.Init(); + + //first create a list of lists that is indexed identically to the drop down list the user selects + TestDetail = new DOEgbXMLTestDetail(); + //then populate the list of lists. 
All indexing is done "by hand" in InitializeTestResultStrings() + TestDetail.InitializeTestResultStrings(); + + //create report list reportlist will store all the test result + ReportList = new List(); + + //Load an XML File for the test at hand + gbXMLTestFile = new XmlDocument(); + gbXMLTestFile.Load(xmldoc); + + gbXMLStandardFile = new XmlDocument(); + gbXMLStandardFile.Load(filepaths[TestToRun]); + + + if (!TestFileIsAvailable()) + { + //TODO: update browser json with something to indicate there is a problem + return; + } + + + //Define the namespace + XmlNamespaceManager gbXMLns1 = new XmlNamespaceManager(gbXMLTestFile.NameTable); + gbXMLns1.AddNamespace("gbXMLv5", "http://www.gbxml.org/schema"); + XmlNamespaceManager gbXMLns2 = new XmlNamespaceManager(gbXMLStandardFile.NameTable); + gbXMLns2.AddNamespace("gbXMLv5", "http://www.gbxml.org/schema"); + + List gbXMLdocs = new List(); + gbXMLdocs.Add(gbXMLTestFile); + gbXMLdocs.Add(gbXMLStandardFile); + List gbXMLnsm = new List(); + gbXMLnsm.Add(gbXMLns1); + gbXMLnsm.Add(gbXMLns2); + + //standardizing all tests on US-IP + Conversions c = new Conversions(); + Conversions.volumeUnitEnum testVol = Conversions.volumeUnitEnum.CubicFeet; + Conversions.volumeUnitEnum validatorVol = Conversions.volumeUnitEnum.CubicFeet; + Conversions.areaUnitEnum testArea = Conversions.areaUnitEnum.SquareFeet; + Conversions.areaUnitEnum validatorArea = Conversions.areaUnitEnum.SquareFeet; + Conversions.lengthUnitEnum testLength = Conversions.lengthUnitEnum.Feet; + Conversions.lengthUnitEnum validatorLength = Conversions.lengthUnitEnum.Feet; + + Conversions.lengthUnitEnum standardLength = Conversions.lengthUnitEnum.Feet; + Conversions.areaUnitEnum standardArea = Conversions.areaUnitEnum.SquareFeet; + Conversions.volumeUnitEnum standardVol = Conversions.volumeUnitEnum.CubicFeet; + + //standardize all units to feet, square feet, and cubic feet + double testlengthConversion = 1; + double testareaConversion = 1; + double testvolumeConversion = 1; + double standardlengthConversion = 1; + double standardareaConversion = 1; + double standardvolConversion = 1; + + bool mustBePlanar = false; + + for(int ns=0; ns< gbXMLnsm.Count; ns++) + { + if (ns == 0) + { + XmlNodeList nodes = gbXMLdocs[ns].SelectNodes("/gbXMLv5:gbXML", gbXMLnsm[ns]); + StandardizeToUSIP(nodes, c, ref testlengthConversion, ref testareaConversion, ref testvolumeConversion, ref testLength, ref testArea, ref testVol); + } + else + { + XmlNodeList nodes = gbXMLdocs[ns].SelectNodes("/gbXMLv5:gbXML", gbXMLnsm[ns]); + StandardizeToUSIP(nodes, c, ref standardlengthConversion, ref standardareaConversion, ref standardvolConversion, ref standardLength, ref standardArea, ref standardVol); + } + } + + //TODO: Add a summary of the Unit of Measures stuff above to the final result + + //Create a Log file that logs the success or failure of each test. + //Eventually maybe I want to create a little HTML factory + + output = ""; + log = ""; + table += "
" + + "

" + "Test Sections" + "

"; + table += ""; + table += "" + + "" + + "" + + "" + + "" + + "" + + ""; + + string units; + DOEgbXMLReportingObj report = new DOEgbXMLReportingObj(); + CampusReport camprep = new CampusReport(); + + //this string I can manipulate to produce the final test output + string menujson = JsonConvert.SerializeXmlNode(gbXMLStandardFile); + //prepare json for view + menujson = MakeViewJson(menujson); + + gbr.menu = menujson; + gbr.CampusReport = camprep; + + report.standResult = new List(); + report.testResult = new List(); + report.idList = new List(); + report.MessageList = new List(); + report.TestPassedDict = new Dictionary(); + + //Set up the Global Pass/Fail criteria for the test case file + TestCriteria = new DOEgbXMLTestCriteriaObject(); + TestCriteria.InitializeTestCriteriaWithTestName(testToRun); + //needed specially for area and volume tests + DOEgbXMLTestDetail afile = TestDetail.TestDetailList.Find(x => x.testName == TestToRun); + + BuildingSummary bs = new BuildingSummary(); + bs.FileType = "Standard"; + camprep.BuildingSummary.Add(bs); + BuildingSummary bst = new BuildingSummary(); + bst.FileType = "Test"; + camprep.BuildingSummary.Add(bst); + //Test 2 execute + //report.tolerance = DOEgbXMLBasics.Tolerances.AreaTolerance; + //report.testType = TestType.Building_Area; + //units = DOEgbXMLBasics.MeasurementUnits.sqft.ToString(); + //report = GetBuildingArea(gbXMLdocs, gbXMLnsm, report, validatorArea, testArea, testareaConversion, standardareaConversion); + logger.Info("START: BUILDING AREA TEST"); + var baresult = GetBuildingArea(gbXMLdocs, gbXMLnsm, ref camprep, validatorArea, testArea, testareaConversion, standardareaConversion, DOEgbXMLBasics.Tolerances.AreaPercentageTolerance, afile ); + if(!baresult) + { + camprep.BuildingSummary.Find(x => x.FileType == "Standard").PassedAllTests = false; + } + logger.Info("END: BUILDING AREA TEST"); + + //GetBuildingArea(gbXMLdocs,gbXMLnsm,) + //AddToOutPut("Building Area Test Passed: ", report, true); + + //Test 3 execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.SpaceCountTolerance; + //report.testType = TestType.Space_Count; + //units = DOEgbXMLBasics.MeasurementUnits.spaces.ToString(); + //report = GetBuildingSpaceCount(gbXMLdocs, gbXMLnsm, report, units); + logger.Info("START: BUILDING SPACE COUNT TEST"); + SpacesSummary ssm = new SpacesSummary(); + ssm.FileType = "Standard"; + camprep.SpacesSummary.Add(ssm); + SpacesSummary ssmt = new SpacesSummary(); + ssmt.FileType = "Test"; + camprep.SpacesSummary.Add(ssmt); + var spctResult = GetBuildingSpaceCount(gbXMLdocs, gbXMLnsm, "", DOEgbXMLBasics.Tolerances.SpaceCountTolerance, ref camprep); + if(!spctResult) + { + camprep.BuildingSummary.Find(x => x.FileType == "Standard").PassedAllTests = false; + camprep.SpacesSummary.Find(x => x.FileType == "Standard").PassedAllTests = false; + } + logger.Info("END: BUILDING SPACE COUNT TEST"); + //AddToOutPut("Building Space Quantity Count Test Passed: ", report, true); + + // Building Stories Tests.... 
+ ////Test 4 execute + //report.Clear(); + report.tolerance = DOEgbXMLBasics.Tolerances.LevelCountTolerance; + //report.testType = TestType.Building_Story_Count; + //units = DOEgbXMLBasics.MeasurementUnits.levels.ToString(); + //report = GetBuildingStoryCount(gbXMLdocs, gbXMLnsm, report, units); + logger.Info("START: Building Storey Count Test"); + var blstctresult = GetBuildingStoryCount(gbXMLdocs, gbXMLnsm, ref camprep, DOEgbXMLBasics.Tolerances.LevelCountTolerance); + if(!blstctresult) + { + camprep.BuildingSummary.Find(x => x.FileType == "Standard").PassedAllTests = false; + //TODO: Need a Building Story Summary Field + } + logger.Info("END: Building Storey Count Test"); + //AddToOutPut("Building Story Count Test Passed: ", report, true); + + + //Test 5 execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.LevelHeightTolerance; + //report.testType = TestType.Building_Story_Z_Height; + //units = DOEgbXMLBasics.MeasurementUnits.ft.ToString(); + report = GetStoryHeights(gbXMLdocs, gbXMLnsm, report, validatorLength, testLength, testlengthConversion, standardlengthConversion); + logger.Info("START: Building Storey Height Test"); + var storyHeightsres = GetStoryHeights(gbXMLdocs, gbXMLnsm, ref camprep, validatorLength, testLength, testlengthConversion, standardlengthConversion, DOEgbXMLBasics.Tolerances.LevelHeightTolerance); + if (!storyHeightsres) + { + camprep.BuildingSummary.Find(x => x.FileType == "Standard").PassedAllTests = false; + //TODO: Need a Building Story Summary Field + } + logger.Info("END: Building Storey Height Test"); + //AddToOutPut("Building Story Z-Height Test: ", report, true); + + + //Test 6 execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.VectorAngleTolerance; + //report.testType = TestType.Building_Story_PolyLoop_RHR; + //units = "degrees"; + //report = TestBuildingStoryRHR(gbXMLdocs, gbXMLnsm, report, units); + logger.Info("START: Building Story Right Hand Rule Test."); + var blstRHResult = TestBuildingStoryRHR(gbXMLdocs, gbXMLnsm, ref camprep); + if(!blstRHResult) + { + //this method has no bearing on the overall pass or fail tests. + //camprep.BuildingSummary.Find(x => x.FileType == "Standard").PassedAllTests = false; + //TODO: Need a Building Story Summary Field + } + logger.Info("END: Building Story Right Hand Rule Test."); + //AddToOutPut("Building Story PolyLoop Right Hand Rule Test Result:", report, true); + + + //String spShellGeometrySurfaceNum = TestShellGeomSurfaceNum(gbXMLTestFile, gbXMLns); + + //Space Tests ............................................................. 
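+            // The space tests resolve gbXML nodes through the namespace managers created above, which map the
+            // prefix "gbXMLv5" to "http://www.gbxml.org/schema". An illustrative lookup (the exact XPath queries
+            // live inside the individual test methods) would be:
+            //   doc.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space", nsm);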
+ //Test 7 execute + //only needs to test the test file + //report.Clear(); + //report.testType = TestType.SpaceId_Match_Test; + logger.Info("START: UNIQUE SPACE ID TEST"); + var spaceIDresults = UniqueSpaceIdTest(gbXMLdocs, gbXMLnsm, ref camprep); + if(!spaceIDresults) + { + camprep.SpacesSummary.Find(x => x.FileType == "Standard").PassedAllTests = false; + } + logger.Info("END: UNIQUE SPACE ID TEST"); + //AddToOutPut("SpaceId Match Test: ", report, true); + + + //Test 8 execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.SpaceAreaTolerance; + //report.testType = TestType.Space_Area; + //units = DOEgbXMLBasics.MeasurementUnits.sqft.ToString(); + logger.Info("START: SPACE AREAS TEST"); + //report = TestSpaceAreas(gbXMLdocs, gbXMLnsm, report, validatorArea, testArea, testareaConversion,standardareaConversion,afile); + var result = TestSpaceAreas(gbXMLdocs, gbXMLnsm, ref camprep, validatorArea, testArea, testareaConversion, standardareaConversion, afile, DOEgbXMLBasics.Tolerances.SpaceAreaTolerance); + if(!result) + { + camprep.SpacesSummary.Find(x => x.FileType == "Standard").PassedAllTests = false; + //gbxml detailed compliance results? + } + logger.Info("END: SPACE AREAS TEST"); + //AddToOutPut("Space Areas Test: ", report, true); + + + //Test 9 execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.VolumeTolerance; + //report.testType = TestType.Space_Volume; + //units = DOEgbXMLBasics.MeasurementUnits.cubicft.ToString(); + logger.Info("START: SPACE VOLUMES TEST"); + //report = TestSpaceVolumes(gbXMLdocs, gbXMLnsm, report, validatorVol, testVol, testvolumeConversion,standardvolConversion,afile); + var volresult = TestSpaceVolumes(gbXMLdocs, gbXMLnsm, ref camprep, validatorVol, testVol, testvolumeConversion, standardvolConversion, afile, DOEgbXMLBasics.Tolerances.VolumePercentageTolerance); + logger.Info("END: SPACE VOLUMES TEST"); + if (!volresult) + { + camprep.SpacesSummary.Find(x => x.FileType == "Standard").PassedAllTests = false; + //gbxml detailed compliance results? + } + //AddToOutPut("Space Volumes Test: ", report, true); + + + //Test 10 Execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.VectorAngleTolerance; + //report.testType = TestType.Shell_Geom_RHR; + //units = "degrees"; + //report = TestShellGeomPLRHR(gbXMLdocs, gbXMLnsm, report, units); + //AddToOutPut("Shell Geometry RHR Test: ",report); + + //Surface Element tests + //deprecating all counts tests as criteria for passing and failing. 
Use this now only to indicate counts in the surfaces summary + ////Test 11 Execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.SurfaceCountTolerance; + //report.testType = TestType.Total_Surface_Count; + //units = ""; + //report = GetSurfaceCount(gbXMLdocs, gbXMLnsm, report, units); + //AddToOutPut("Surface Count Test Result: ", report, true); + + + ////Surface Element tests + ////Test 12 Execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.ExteriorWallCountTolerance; + //report.testType = TestType.Exterior_Wall_Surface_Count; + //units = ""; + SurfaceSummary ss = new SurfaceSummary(); + ss.FileType = "Standard"; + camprep.SurfacesSummary.Add(ss); + SurfaceSummary sst = new SurfaceSummary(); + sst.FileType = "Test"; + camprep.SurfacesSummary.Add(sst); //initialization of summaries complete + logger.Info("START: EXTERIOR WALL COUNT"); + //report = GetEWSurfaceCount(gbXMLdocs, gbXMLnsm, report, units); + var ewctresult = GetEWSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep); + logger.Info("END: EXTERIOR WALL COUNT"); + if (!ewctresult) + { + //do nothing, it has no consequence for now + //gbxml detailed compliance results? + } + //AddToOutPut("Exterior Wall Surface Count Test Result: ", report, true); + + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.SurfaceCountTolerance; + //report.testType = TestType.Underground_Surface_Count; + //units = ""; + //report = GetUGSurfaceCount(gbXMLdocs, gbXMLnsm, report, units); + logger.Info("START: UNDERGROUND WALL COUNT"); + var ugwctresult = GetUGSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep); + if (!ugwctresult) + { + //do nothing, it has no consequence for now + //gbxml detailed compliance results? + } + logger.Info("END: UNDERGROUND WALL COUNT"); + //AddToOutPut("Underground Wall Count Test Result: ", report, true); + + logger.Info("START: SLABONGRADE COUNT"); + var sogctresult = GetSOGSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep); + if (!sogctresult) + { + //do nothing, it has no consequence for now + //gbxml detailed compliance results? + } + logger.Info("END: SLABONGRADE WALL COUNT"); + + ////Surface Element tests + ////Test 13 Execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.InteriorWallCountTolerance; + //report.testType = TestType.Interior_Wall_Surface_Count; + //units = ""; + logger.Info("START: INTERIOR WALL COUNT"); + //report = GetIWSurfaceCount(gbXMLdocs, gbXMLnsm, report, units); + var iwctresult = GetIWSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep); + if (!iwctresult) + { + //do nothing, it has no consequence for now + //gbxml detailed compliance results? + } + logger.Info("END: INTERIOR WALL COUNT"); + //AddToOutPut("Interior Wall Surface Count Test Result: ", report, true); + + ////Surface Element tests + ////Test 13 Execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.InteriorFloorCountTolerance; + //report.testType = TestType.Interior_Floor_Surface_Count; + //units = ""; + //report = GetIFSurfaceCount(gbXMLdocs, gbXMLnsm, report, units); + logger.Info("START: INTERIOR FLOOR/CEILING COUNT"); + var ifctresult = GetIFSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep); + if (!ifctresult) + { + //do nothing, it has no consequence for now + //gbxml detailed compliance results? 
+ } + logger.Info("END: INTERIOR FLOOR/CEILING COUNT"); + + //AddToOutPut("Interior Floor Surface Count Test Result: ", report, true); + + + ////Surface Element tests + ////Test 14 Execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.InteriorWallCountTolerance; + //report.testType = TestType.Roof_Surface_Count; + //units = ""; + //report = GetRoofSurfaceCount(gbXMLdocs, gbXMLnsm, report, units); + logger.Info("START: ROOF COUNT"); + var irctresult = GetRoofSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep); + if (!irctresult) + { + //do nothing, it has no consequence for now + //gbxml detailed compliance results? + } + logger.Info("END: ROOF COUNT"); + //AddToOutPut("Roof Surface Count Test Result: ", report, true); + + + ////Surface Element tests + ////Test 15 Execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.InteriorWallCountTolerance; + //report.testType = TestType.Shading_Surface_Count; + //units = ""; + //report = GetShadeSurfaceCount(gbXMLdocs, gbXMLnsm, report, units); + logger.Info("START: SHADING DEVICE COUNT"); + var shadectresult = GetShadeSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep); + if (!shadectresult) + { + //do nothing, it has no consequence for now + //gbxml detailed compliance results? + } + //AddToOutPut("Shading Surface Count Test Result: ", report, true); + logger.Info("END: SHADING DEVICE COUNT"); + + ////Test 16 Execute + //report.Clear(); + //report.tolerance = DOEgbXMLBasics.Tolerances.AirWallCountTolerance; + //report.testType = TestType.Air_Surface_Count; + //units = ""; + //report = GetAirSurfaceCount(gbXMLdocs, gbXMLnsm, report, units); + logger.Info("START: AIR SURFACE COUNT"); + var asctresult = GetAirSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep); + if (!asctresult) + { + //do nothing, it has no consequence for now + //gbxml detailed compliance results? + } + logger.Info("END: AIR SURFACE COUNT"); + //AddToOutPut("Air Surface Count Test Result: ", report, true); + + + #region surface detailed test + //Jan 31-2012: We may not want to perform these if the surface counts fail, but for now, we will include these tests + //Detailed Surface Checks + //Store Surface Element Information + List TestSurfaces = new List(); + XmlDocument TestFile = gbXMLdocs[0]; + XmlNamespaceManager TestNSM = gbXMLnsm[0]; + List StandardSurfaces = new List(); + XmlDocument StandardFile = gbXMLdocs[1]; + XmlNamespaceManager StandardNSM = gbXMLnsm[1]; + TestSurfaces = GetFileSurfaceDefs(TestFile, TestNSM); + StandardSurfaces = GetFileSurfaceDefs(StandardFile, StandardNSM); + string TestSurfaceTable = "
" + "Test Section Name" + "" + "Standard Result" + "" + "Test File Result" + "" + "Tolerances" + "" + "Pass/Fail" + "
"; + TestSurfaceTable += "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + ""; + //Test Surfaces Planar Test + //all polyloops must be such that the surface defined by the coordinates is planar + //report.Clear(); + //report.testType = TestType.Surface_Planar_Test; + logger.Info("START: SURFACE PLANARITY TEST"); + //report = TestSurfacePlanarTest(TestSurfaces, report); + var planarityResult = TestSurfacePlanarTest(TestSurfaces,ref camprep); + if (!planarityResult) + { + camprep.SurfacesSummary.Find(x => x.FileType == "Standard").PassedAllTests = false; + //gbxml detailed compliance results? + } + logger.Info("END: SURFACE PLANARITY TEST"); + + //if (!report.passOrFail && mustBePlanar) + //{ + // AddToOutPut("Test File Planar Surface Check: ", report, true); + // report.Clear(); + //} + + + //only run detailed surface checks if the surfaces are planar + if(planarityResult) + { + // + //Written Jan 31, 2013 by Chien Si Harriman, Senior Product Manager, Carmel Software Corporation + //Execute Tests + + + // globalMatchObject.MatchedSurfaceIds = new Dictionary>(); + int i = 1; + + foreach (SurfaceDefinitions surface in StandardSurfaces) + { + report.Clear(); + DetailedSurfaceSummary ssSummary = new DetailedSurfaceSummary(); + //multiple tolerances used + report.testType = TestType.Detailed_Surface_Checks; + report.subTestIndex = i; + if (surface.SurfaceId == "su-zone_5_Srf_7" || surface.SurfaceId == "su-zone_0_Srf_0") + { + var d = 1; + } + logger.Info("START: DETAILED SURFACE TEST"); + GetSurfaceMatches(surface, TestSurfaces, ref ssSummary, validatorLength, testLength, testlengthConversion, standardlengthConversion, validatorArea, testArea, testareaConversion, standardareaConversion); + logger.Info("END: DETAILED SURFACE TEST"); + camprep.SurfacesReport.Add(ssSummary); + + } + #endregion + + + + + + #region opending detailed test + //openings detailed tests + List TestOpenings = new List(); + XmlDocument testFile = gbXMLdocs[0]; + XmlNamespaceManager testNSM = gbXMLnsm[0]; + List StandardOpenings = new List(); + XmlDocument standardFile = gbXMLdocs[1]; + XmlNamespaceManager standardNSM = gbXMLnsm[1]; + TestOpenings = GetFileOpeningDefs(TestFile, TestNSM); + StandardOpenings = GetFileOpeningDefs(StandardFile, StandardNSM); + + string TestOpeningTable = ""; + report.Clear(); + report.testType = TestType.Opening_Planar_Test; + report = TestOpeningPlanarTest(TestOpenings, report); + + if (!report.passOrFail) + { + AddToOutPut("Test File Planar Opening Check: ", report, true); + report.Clear(); + } + //only run detailed opening checks if the opening are planar + else + { + TestOpeningTable = "
" + "Test Section Name" + "" + "Stand Surface ID" + "" + "Test Surface ID" + "" + "Stand Surface Tilt" + "" + "Test Surface Tilt" + "" + "Stand Surface Azimuth" + "" + "Test Surface Azimuth" + "" + "Stand Surface Height" + "" + "Test Surface Height" + "" + "Stand Surface Width" + "" + "Test Surface Width" + "" + "Pass/Fail" + "
"; + TestOpeningTable += "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + ""; + + globalMatchObject.MatchedOpeningIds = new Dictionary>(); + int j = 1; + //if no openings remove the table. + if (StandardOpenings.Count < 1) + TestOpeningTable = ""; + //compare the openings + foreach (OpeningDefinitions opening in StandardOpenings) + { + report.Clear(); + + report.testType = TestType.Detailed_Opening_Checks; + report.subTestIndex = j; + + report = GetPossibleOpeningMatches(opening, TestOpenings, report); + + AddToOutPut("Test 17 for Opening number " + j + " Result: ", report, false); + + foreach (OpeningDefinitions to in TestOpenings) + { + if (globalMatchObject.MatchedOpeningIds.ContainsKey(opening.OpeningId)) + { + foreach (string id in globalMatchObject.MatchedOpeningIds[opening.OpeningId]) + { + if (to.OpeningId == id) + { + if (report.passOrFail) + TestOpeningTable += "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + ""; + } + + } + + } + } + //if didn't find match means it failed the test + if (!report.passOrFail) + TestOpeningTable += "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + + ""; + j += 1; + + } + } + TestOpeningTable += "
" + "Test Section Name" + "" + "Standard Opening Id" + "" + "Test Opening Id" + "" + "Standard Parent Surface Id" + "" + "Test Parent Surface Id" + "" + "Standard Parent Azimuth" + "" + "Test Parent Azimuth" + "" + "Standard Parent Tilt" + "" + "Test Parent Tilt" + "" + "Standard Surface Area" + "" + "Test Surface Area" + "" + "Pass/Fail" + "
" + "" + + "Detailed Opening Checks " + report.subTestIndex + "" + "" + opening.OpeningId + "" + to.OpeningId + "" + opening.ParentSurfaceId + "" + to.ParentSurfaceId + "" + String.Format("{0:#,0.00}", opening.ParentAzimuth) + "" + String.Format("{0:#,0.00}", to.ParentAzimuth) + "" + String.Format("{0:#,0.00}", opening.ParentTilt) + "" + String.Format("{0:#,0.00}", to.ParentTilt) + "" + String.Format("{0:#,0.00}", opening.surfaceArea) + "" + String.Format("{0:#,0.00}", to.surfaceArea) + "" + "Pass" + "
" + "" + + "Detailed Opening Checks " + report.subTestIndex + "" + "" + opening.OpeningId + "" + "---" + "" + opening.ParentSurfaceId + "" + "---" + "" + String.Format("{0:#,0.00}", opening.ParentAzimuth) + "" + "---" + "" + String.Format("{0:#,0.00}", opening.ParentTilt) + "" + "---" + "" + String.Format("{0:#,0.00}", opening.surfaceArea) + "" + "---" + "" + "Fail" + "

"; + #endregion + + //close table + table += "
"; + //add TestSurfaceTable + table += TestSurfaceTable + TestOpeningTable; + + + } + //CreateSummaryTable(); + ((FileAppender)LogManager.GetCurrentLoggers()[0].Logger.Repository.GetAppenders()[0]).Close(); + + } + + private string MakeViewJson(string xmljson) + { + string retstring = String.Empty; + JObject ob = JObject.Parse(xmljson); + var gbxml = ob["gbXML"]; + JObject root = new JObject(); + root["title"] = "gbXML"; + root["id"] = "menuTitle"; + JArray rootChildren = new JArray(); + root["items"] = rootChildren; //added temp enums, the first child of gbXML + + JObject uomroot = new JObject(); + rootChildren.Add(uomroot); + + uomroot["name"] = "Units of Measure"; + uomroot["id"] = "UoM"; + JArray uomrootItems = new JArray(); + uomroot["items"] = uomrootItems; + + JObject uom = new JObject(); + uomrootItems.Add(uom); //quirk of the framework, + + uom["title"] = uomroot["name"]; + uom["id"] = String.Empty; + JArray uomitems = new JArray(); + uom["items"] = uomitems; + + JObject t = new JObject(); + uomitems.Add(t); + + t["name"] = "Temperature"; + t["id"] = "temperatureEnum"; + + var campus = gbxml["Campus"]; + JObject campusRoot = new JObject(); + rootChildren.Add(campusRoot); //added campus, the other child of gbXML + campusRoot["name"] = "Campus"; + campusRoot["id"] = "campus"; + + JArray campusrootItems = new JArray(); + campusRoot["items"] = campusrootItems; + + JObject campusTitle = new JObject(); + campusrootItems.Add(campusTitle); + campusTitle["title"] = "Campus"; + campusTitle["id"] = String.Empty; + JArray campusItems = new JArray(); + campusTitle["items"] = campusItems; + + var building = campus["Building"]; + JObject buildingroot = new JObject(); + campusItems.Add(buildingroot); //added building to campus, its first child + + buildingroot["name"] = "Building"; + buildingroot["id"] = "building"; + JArray buildingrootItems = new JArray(); + buildingroot["items"] = buildingrootItems; + + JObject build = new JObject(); + buildingrootItems.Add(build); + build["title"] = "Building"; + build["id"] = String.Empty; + + JArray buildItems = new JArray(); + build["items"] = buildItems; + + JObject strysroot = new JObject(); + buildItems.Add(strysroot); + strysroot["name"] = "Building Stories"; + strysroot["id"] = "buildingStories"; + + JArray storyrootItems = new JArray(); + strysroot["items"] = storyrootItems; + + JObject stry = new JObject(); + storyrootItems.Add(stry); + stry["title"] = "Building Stories"; + stry["id"] = String.Empty; + + JArray storyItems = new JArray(); + stry["items"] = storyItems; + + var stories = building["BuildingStorey"]; + for(int st = 0; st< stories.Count(); st++) + { + JObject storyobject = new JObject(); + storyobject["name"] = stories[st]["@id"].ToString(); + storyobject["id"] = stories[st]["@id"].ToString(); + storyItems.Add(storyobject); + } + + JObject spacesRoot = new JObject(); + buildItems.Add(spacesRoot); + spacesRoot["name"] = "Spaces"; + spacesRoot["id"] = "spaces"; + JArray spacesRootItems = new JArray(); + spacesRoot["items"] = spacesRootItems; + + JObject spaceItem = new JObject(); + spacesRootItems.Add(spaceItem); + spaceItem["title"] = "Spaces"; + spaceItem["id"] = String.Empty; + + JArray spaceItems = new JArray(); + spaceItem["items"] = spaceItems; + + var spaces = building["Space"]; + for (int s = 0; s < spaces.Count(); s++) + { + var space = spaces[s]; + var name = space["@id"].ToString(); + JObject spobj = new JObject(); + spobj["name"] = name; + spobj["id"] = name; + spaceItems.Add(spobj); + + } + + JObject surfacesRoot = new 
JObject(); + surfacesRoot["name"] = "Surfaces"; + surfacesRoot["id"] = "surfaces"; + campusItems.Add(surfacesRoot); + + JArray surfacesRootItems = new JArray(); + surfacesRoot["items"] = surfacesRootItems; + + JObject surfaceChild = new JObject(); + surfacesRootItems.Add(surfaceChild); + surfaceChild["title"] = "Surfaces"; + surfaceChild["id"] = String.Empty; + + JArray surfaceChildItems = new JArray(); + surfaceChild["items"] = surfaceChildItems; + + //JArrays + var surfaces = campus["Surface"]; + for(int sf = 0; sf< surfaces.Count(); sf++) + { + JObject sfobj = new JObject(); + sfobj["name"] = surfaces[sf]["@id"]; + sfobj["id"] = surfaces[sf]["@id"]; + surfaceChildItems.Add(sfobj); + } + string output = JsonConvert.SerializeObject(root); + + retstring = output; + + + return retstring; + } + + private void AddToOutPut(string title, DOEgbXMLReportingObj report, bool createTable) + { + //add report to report list + //have to deep copy the report before put report in the list + DOEgbXMLReportingObj tmpreport = report.Copy(); + ReportList.Add(tmpreport); + + //title + output += "

" + title + "

"; + log += title + System.Environment.NewLine; + + //message + var passTest = report.TestPassedDict.Values; + bool individualTestBool = true; + foreach (bool testResult in passTest) + { + if (testResult == false) + { + individualTestBool = false; + break; + } + } + if (report.passOrFail && individualTestBool) + output += "

" + report.longMsg + "

"; + else + { + output += "

" + report.longMsg + "

"; + overallPassTest = false; + } + + log += report.longMsg + System.Environment.NewLine; + + //message list, print out each message in the list if there are any + if (report.MessageList.Count > 0) + for (int i = 0; i < report.MessageList.Count; i++) + { + output += "

" + report.MessageList[i] + "

"; + log += report.MessageList[i] + System.Environment.NewLine; + } + + output += "
"; + log += System.Environment.NewLine; + + //create table row + if (createTable) + { + + if (report.standResult.Count == 0) + { + report.standResult.Add("---"); + report.testResult.Add("---"); + report.idList.Add(""); + } + + //for eachout put + for (int i = 0; i < report.standResult.Count; i++) + { + bool sameString = false; + if (report.standResult[i] == report.testResult[i]) + sameString = true; + + //check if test pass or fail + if ((report.passOrFail && individualTestBool) || sameString) + table += ""; + else + { + table += ""; + overallPassTest = false; + } + + table += "" + "" + title + " " + report.idList[i] + "" + ""; + + if ((report.passOrFail && individualTestBool) || sameString) + { + table += "" + report.standResult[i] + " " + report.unit + "" + + "" + report.testResult[i] + " " + report.unit + "" + + "" + "±" + report.tolerance + " " + report.unit + "" + + "Pass" + + ""; + } + else + table += "" + report.standResult[i] + " " + report.unit + "" + + "" + report.testResult[i] + " " + report.unit + "" + + "" + "±" + report.tolerance + " " + report.unit + "" + + "Fail" + + ""; + + } + } + + } + private void CreateSummaryTable() + { + //create overall summary table + //find the right testdetail + //check if the user pass the test + bool passTest = true; + bool aceTest = true; + foreach (DOEgbXMLReportingObj tmpreport in ReportList) + { + if (TestCriteria.TestCriteriaDictionary.ContainsKey(tmpreport.testType)) + { + if (TestCriteria.TestCriteriaDictionary[tmpreport.testType] && !tmpreport.passOrFail) + passTest = false; + if (!TestCriteria.TestCriteriaDictionary[tmpreport.testType] && !tmpreport.passOrFail) + aceTest = false; + } + else if (tmpreport.testType == TestType.Detailed_Surface_Checks) + { + + } + + else + { + + } + } + foreach (DOEgbXMLTestDetail detail in TestDetail.TestDetailList) + if (detail.testName == TestToRun) + { + summaryTable = "

Result Summary

"; + summaryTable += "
"; + + summaryTable += "" + + "" + + "" + + "" + + ""; + + if (passTest && aceTest) + summaryTable += ""; + else if (passTest) + summaryTable += ""; + else + summaryTable += ""; + + summaryTable += "" + + ""; + + if (passTest && aceTest) + summaryTable += "" + ""; + else if (passTest) + summaryTable += "" + ""; + else + summaryTable += "" + ""; + + summaryTable += "
" + "gbXML schema Test" + "" + "" + "" + "Pass" + "
" + "gbXML Test" + "" + detail.shortTitle + "" + detail.passString + "
" + "You pass the test with minor errors" + "
" + detail.failString + "

"; + break; + } + } + private bool TestFileIsAvailable() + { + //check if the file available + if (filepaths.ContainsKey(TestToRun)) + { + gbXMLStandardFile.Load(filepaths[TestToRun]); + return true; + } + else + { + //create overall summary table + summaryTable = "

Result Summary

"; + summaryTable += "
"; + + summaryTable += "" + + "" + + "" + + "" + + ""; + + summaryTable += ""; + + summaryTable += "" + + ""; + + summaryTable += "" + ""; + + summaryTable += "
" + "gbXML schema Test" + "" + "" + "" + "Pass" + "
" + "gbXML Test" + "" + "Test File Currently Not available" + "" + "Error Error Error" + "

"; + return false; + } + } + #endregion + + private XmlDocument ConvertMetricToUS(XmlDocument mdoc) + { + XmlDocument ipdoc = new XmlDocument(); + + return ipdoc; + } + + + + #region Test Functions + + private DOEgbXMLReportingObj GetUGSurfaceCount(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Mar 14 2013 + report.testSummary = "This test compares the total number of Surface elements with the SurfaceType=\"UndergroundWall\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this SurfaceType in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. In other words, the surface counts are the same, or the test fails."; + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + + + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "UndergroundWall") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + report.longMsg = "The Test File's" + report.testType + " matches the Standard File exactly, the difference is zero."; + report.passOrFail = true; + return report; + } + else if (difference <= report.tolerance) + { + report.longMsg = "The Test File's " + report.testType + " matches Standard File within the allowable tolerance, the difference between the two files is " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The Test File's " + report.testType + " does not match Standard File, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". 
" + resultsArray[i] + " exterior wall surfaces in the Standard File and " + resultsArray[i - 1] + " exterior wall surfaces in the Test File."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + //Created Jul 2016 by Chien Si Harriman + private bool GetUGSurfaceCount(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr) + { + //TODO: Consider altering to a dynamic element; + string[] resultsArray = new string[500]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "UndergroundWall") + { + nodecount++; + } + break; + } + } + } + if (i % 2 == 0) + { + cr.SurfacesSummary.Find(x => x.FileType == "Test").NumberOfUndergroundWalls = nodecount; + } + else + { + cr.SurfacesSummary.Find(x => x.FileType == "Standard").NumberOfUndergroundWalls = nodecount; + } + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Underground Wall Count matches the Standard File exactly, the difference is zero."); + } + else + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Underground Wall Count does not match the Standard File exactly."); + + } + } + else { continue; } + + } + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal(" Failed to complete the Underground Wall Count."); + return false; + } + } + + return true; + } + + //Created Jul 2016 by Chien Si Harriman + private bool GetSOGSurfaceCount(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr) + { + //TODO: Consider altering to a dynamic element; + string[] resultsArray = new string[500]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "SlabOnGrade") + { + nodecount++; + } + break; + } + } + } + if (i % 2 == 0) + { + cr.SurfacesSummary.Find(x => x.FileType == "Test").NumberOfSlabsOnGrade = nodecount; + } + else + { + cr.SurfacesSummary.Find(x => x.FileType == "Standard").NumberOfSlabsOnGrade = nodecount; + } + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i 
% 2 != 0) + { + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Slab On Grade Count matches the Standard File exactly, the difference is zero."); + } + else + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Slab On Grade Count does not match the Standard File exactly."); + + } + } + else { continue; } + + } + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal(" Failed to complete the Slab On Grade Count."); + return false; + } + } + + return true; + } + + public void StandardizeToUSIP(XmlNodeList nodes, Conversions c, ref double lengthConv, ref double areaConv, ref double volConv, ref Conversions.lengthUnitEnum lengthEnum, ref Conversions.areaUnitEnum areaEnum, ref Conversions.volumeUnitEnum volEnum) + { + foreach (XmlNode Node in nodes) + { + XmlAttributeCollection spaceAtts = Node.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "volumeUnit") + { + string type = at.Value; + volEnum = (Conversions.volumeUnitEnum)System.Enum.Parse(typeof(Conversions.volumeUnitEnum), type, true); + //we know the test files are in cubic feet + volConv = c.GetVolumeUnitConversion(volEnum, Conversions.volumeUnitEnum.CubicFeet); + if (volConv == -999) + { + //return with an error message stating contact system administrator with a code + } + + } + else if (at.Name == "areaUnit") + { + string type = at.Value; + areaEnum = (Conversions.areaUnitEnum)System.Enum.Parse(typeof(Conversions.areaUnitEnum), type, true); + areaConv = c.GetAreaConversion(areaEnum, Conversions.areaUnitEnum.SquareFeet); + if (areaConv == -999) + { + //return with an error message stating contact system administrator with a code + } + } + else if (at.Name == "lengthUnit") + { + string type = at.Value; + lengthEnum = (Conversions.lengthUnitEnum)System.Enum.Parse(typeof(Conversions.lengthUnitEnum), type, true); + lengthConv= c.GetLengthConversion(lengthEnum, Conversions.lengthUnitEnum.Feet); + if (lengthConv == -999) + { + //return with an error message stating contact system administrator with a code + } + } + } + } + } + + private DOEgbXMLReportingObj GetPossibleOpeningMatches(OpeningDefinitions standardOpening, List TestOpenings, DOEgbXMLReportingObj report) + { + report.testSummary = "This test checks the geometric accuracy of each opening in your test file against the standard file."; + report.testSummary += " For each opening (window, door, skylight) this validator seeks out a similar opening in your test file and"; + //match surfaces at this stage so we know which surface is associated with the window + report.testSummary += " The validator first seeks to find all openings that have a parent surface (roof, external wall, etc.) with"; + report.testSummary += " the same azimuth and tilt. If it finds more than one opening candidate that matches the parent surface tilt and azimuth,"; + report.testSummary += " the validator will make all of these openings possible candidates."; + report.testSummary += " The validator then takes these candidates and looks at their polyloop coordinates. "; + report.testSummary += " and will keep only those openings that have similar polyLoop coordinates"; + report.testSummary += " Next it matches the area, then the width and height, if applicable, and finally checks the insertion"; + report.testSummary += " point coordinates. 
If all of these come back within tolerance, the opening has found a match."; + report.testSummary += " Otherwise, the test will fail."; + report.testSummary += " The summary at the bottom of the page will show the logic of how the test arrived at its conclusion."; + + + bool matchedParentAz = false; + bool matchedParentTilt = false; + bool matchedPolyLoopCoords = false; + + List possibleMatches = new List(); + List possibleMatches2 = new List(); + try + { + //find match of parent surface + //try matching based on the surface matches + //if that does not work, then just try to match the parent tilt and parent azimuth to one another + int i = 0; + report.MessageList.Add("Starting Parent Azimuth and Tilt Match test...."); + report.MessageList.Add("
"); + while (true) + { + //reset + matchedParentAz = false; + matchedParentTilt = false; + OpeningDefinitions testOpening = TestOpenings[i]; + if (testOpening.ParentAzimuth == standardOpening.ParentAzimuth && testOpening.ParentTilt == standardOpening.ParentTilt) + { + report.MessageList.Add("Candidate Found. Test file opening has EXACTLY matched its parent surface azimuth and tilt with the standard opening parent surface azimuth and tilt."); + report.MessageList.Add("Test Opening " + testOpening.OpeningId + "'s [parent, azimuth, tilt]: [" + testOpening.ParentSurfaceId + ", " + testOpening.ParentAzimuth + ", " + testOpening.ParentTilt + "]"); + report.MessageList.Add("Standard Opening " + standardOpening.OpeningId + "'s [parent, azimuth, tilt]: [" + standardOpening.ParentSurfaceId + "," + standardOpening.ParentAzimuth + ", " + standardOpening.ParentTilt + "]"); + + matchedParentAz = true; + matchedParentTilt = true; + } + else + { + double azDifference = Math.Abs(testOpening.ParentAzimuth - standardOpening.ParentAzimuth); + double tiltDifference = Math.Abs(testOpening.ParentTilt - standardOpening.ParentTilt); + if (azDifference < DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance && tiltDifference < DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance) + { + report.MessageList.Add("Candidate found. Test file opening HAS matched WITHIN ALLOWABLE TOLERANCE its parent surface azimuth and tilt with the standard opening parent surface azimuth and tilt."); + report.MessageList.Add("Test Opening " + testOpening.OpeningId + "'s [parent, azimuth, tilt]: [" + testOpening.ParentSurfaceId + ", " + testOpening.ParentAzimuth + ", " + testOpening.ParentTilt + "]"); + report.MessageList.Add("Standard Opening " + standardOpening.OpeningId + "'s [parent, azimuth, tilt]: [" + standardOpening.ParentSurfaceId + "," + standardOpening.ParentAzimuth + ", " + standardOpening.ParentTilt + "]"); + + matchedParentAz = true; + matchedParentTilt = true; + } + else + { + report.MessageList.Add("Candidate rejected. Test file opening HAS NOT matched WITHIN ALLOWABLE TOLERANCE its parent surface azimuth and tilt with the standard opening parent surface azimuth and tilt."); + report.MessageList.Add("Test Opening " + testOpening.OpeningId + "'s [parent, azimuth, tilt]: [" + testOpening.ParentSurfaceId + ", " + testOpening.ParentAzimuth + ", " + testOpening.ParentTilt + "]"); + report.MessageList.Add("Standard Opening " + standardOpening.OpeningId + "'s [parent, azimuth, tilt]: [" + standardOpening.ParentSurfaceId + "," + standardOpening.ParentAzimuth + ", " + standardOpening.ParentTilt + "]"); + report.MessageList.Add("
"); + } + } + + if (matchedParentAz && matchedParentTilt) + { + possibleMatches.Add(testOpening); + report.MessageList.Add("Successful Match Candidate Identified."); + report.MessageList.Add("
"); + } + i++; + + if (i == TestOpenings.Count) + { + if (possibleMatches.Count == 0) + { + //no candidates found + report.MessageList.Add("No candidates found in the test file to match standard file opening " + standardOpening.OpeningId); + report.passOrFail = false; + report.longMsg = "Test to find suitable opening candidate in the test file has failed. Parent Tilt and Azimuth matches could not be established."; + //no need to go further + return report; + } + break; + } + + } + report.MessageList.Add("
"); + report.MessageList.Add("Starting Opening PolyLoop Coordinate Match test........."); + i = 0; + while (true) + { + OpeningDefinitions testOpening = possibleMatches[i]; + //continue to next test + + //continue the next batch of tests + //polyloop absolute coordinates + //check the polyLoop coordinates + foreach (Vector.MemorySafe_CartCoord standardPolyLoopCoord in standardOpening.PlCoords) + { + report = GetOpeningPolyLoopCoordMatch(standardPolyLoopCoord, testOpening, report, standardOpening.OpeningId); + if (report.passOrFail) + { + matchedPolyLoopCoords = true; + continue; + } + else + { + report.MessageList.Add("Could not find a coordinate match in the test opening polyloop."); + matchedPolyLoopCoords = false; + break; + } + } + //if matchePolyLoopCoords comes back true, then a candidate has been found that matches all polyloop coords within tolerance + if (matchedPolyLoopCoords == true) + { + possibleMatches2.Add(testOpening); + } + i++; + + if (i == possibleMatches.Count) + { + if (possibleMatches2.Count == 0) + { + report.MessageList.Add("No candidates found in the test file to match standard file opening " + standardOpening.OpeningId); + report.passOrFail = false; + report.longMsg = "Test to find suitable opening candidate in the test file has failed. Parent Tilt and Azimuth matches were established, but these candidates did not produce good polyLoop coordinate matches."; + //no need to go further + return report; + } + break; + } + } + //next set of tests + //polyloop area tests + report.MessageList.Add("
"); + report.MessageList.Add("Starting Opening Surface Area Match test........."); + possibleMatches.Clear(); + i = 0; + while (true) + { + #region + OpeningDefinitions testOpening = possibleMatches2[i]; + + if (Math.Abs(standardOpening.PlRHRVector.X) == 1 && standardOpening.PlRHRVector.Y == 0 && standardOpening.PlRHRVector.Z == 0) + { + List coordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in standardOpening.PlCoords) + { + //only take the Y and Z coordinates and throw out the X because we can assume that they are all the same + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(0, coord.Y, coord.Z); + coordList.Add(c2); + + } + double area = Math.Abs(GetAreaFrom2DPolyLoop(coordList)); + standardOpening.surfaceArea = area; + if (area == -999) + { + //these messages should never occur and are a sign of some sort of serious, as of yet unknown error + //March 20 2013 + report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); + report.longMsg = "Fatal error. Please contact gbXML administrator"; + report.passOrFail = false; + return report; + + } + double testOpeningArea = 0; + + if (Math.Abs(testOpening.PlRHRVector.X) == 1 && testOpening.PlRHRVector.Y == 0 && + testOpening.PlRHRVector.Z == 0) + { + List testCoordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in testOpening.PlCoords) + { + Vector.MemorySafe_CartCoord o2 = new Vector.MemorySafe_CartCoord(0, coord.Y, coord.Z); + testCoordList.Add(o2); + } + testOpeningArea = Math.Abs(GetAreaFrom2DPolyLoop(testCoordList)); + testOpening.surfaceArea = testOpeningArea; + if (testOpeningArea == -999) + { + //these messages should never occur and are a sign of some sort of serious, as of yet unknown error + //March 20 2013 + report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + report.longMsg = "Fatal error. 
Please contact gbXML administrator"; + report.passOrFail = false; + return report; + } + double difference = Math.Abs(area) - Math.Abs(testOpeningArea); + if (difference < Math.Abs(area) * DOEgbXMLBasics.Tolerances.OpeningAreaPercentageTolerance) + { + + if (difference == 0) + { + //then it perfectly matches, go on to check the poly loop coordinates + //then check the insertion point + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " polyloop surface area matches the polyLoop surface area of the standard opening: " + standardOpening.OpeningId + " exactly."); + possibleMatches.Add(testOpening); + } + else + { + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " polyloop surface area matches the polyLoop surface area of the standard opening: " + standardOpening.OpeningId + " within the allowable area percentage tolerance."); + possibleMatches.Add(testOpening); + } + } + else + { + report.MessageList.Add("The standard file opening cannot find a match for its surface area of opening: " + standardOpening.OpeningId + " through a comparison of its polyloop coordinates with test opening: " + testOpening.OpeningId); + //don't return here, it will be returned below + } + } + else + { + //by definition, the Window opening should always use coordinates that create a normal vector that points in the + //positive or negative X direction. If the test file does not do this, then this is in violation of the + //gbXML spec + report.longMsg = ("This test has failed because the test opening" + testOpening.OpeningId + "has polyloop coordinates "); + report.longMsg += (" that do not have the same normal vector as the standard opening."); + report.passOrFail = false; + } + } + else if (standardOpening.PlRHRVector.X == 0 && Math.Abs(standardOpening.PlRHRVector.Y) == 1 && standardOpening.PlRHRVector.Z == 0) + { + List coordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in standardOpening.PlCoords) + { + //only take the Y and Z coordinates and throw out the X because we can assume that they are all the same + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, 0, coord.Z); + coordList.Add(c2); + + } + double area = Math.Abs(GetAreaFrom2DPolyLoop(coordList)); + standardOpening.surfaceArea = area; + if (area == -999) + { + //these messages should never occur and are a sign of some sort of serious, as of yet unknown error + //March 20 2013 + report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); + report.longMsg = "Fatal error. 
Please contact gbXML administrator"; + report.passOrFail = false; + return report; + + } + double testOpeningArea = 0; + + if (testOpening.PlRHRVector.X == 0 && Math.Abs(testOpening.PlRHRVector.Y) == 1 && + testOpening.PlRHRVector.Z == 0) + { + List testCoordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in testOpening.PlCoords) + { + Vector.MemorySafe_CartCoord o2 = new Vector.MemorySafe_CartCoord(coord.X, 0, coord.Z); + testCoordList.Add(o2); + } + testOpeningArea = Math.Abs(GetAreaFrom2DPolyLoop(testCoordList)); + testOpening.surfaceArea = testOpeningArea; + if (testOpeningArea == -999) + { + //these messages should never occur and are a sign of some sort of serious, as of yet unknown error + //March 20 2013 + report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + report.longMsg = "Fatal error. Please contact gbXML administrator"; + report.passOrFail = false; + return report; + } + double difference = Math.Abs(area) - Math.Abs(testOpeningArea); + if (difference < Math.Abs(area) * DOEgbXMLBasics.Tolerances.OpeningAreaPercentageTolerance) + { + + if (difference == 0) + { + //then it perfectly matches, go on to check the poly loop coordinates + //then check the insertion point + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " polyloop surface area matches the polyLoop surface area of the standard Opening: " + standardOpening.OpeningId + " exactly."); + possibleMatches.Add(testOpening); + } + else + { + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " polyloop surface area matches the polyLoop surface area of the standard Opening: " + standardOpening.OpeningId + " within the allowable area percentage tolerance."); + possibleMatches.Add(testOpening); + } + } + else + { + report.MessageList.Add("The standard file opening cannot find a match for its surface area of Opening: " + standardOpening.OpeningId + " through a comparison of its polyloop coordinates with test Opening: " + testOpening.OpeningId); + //don't return here, it will be returned below + } + } + else + { + //by definition, the Window opening should always use coordinates that create a normal vector that points in the + //positive or negative X direction. 
If the test file does not do this, then this is in violation of the + //gbXML spec + report.longMsg = ("This test has failed because the test opening" + testOpening.OpeningId + "has polyloop coordinates "); + report.longMsg += (" that do not have the same normal vector as the standard opening."); + report.passOrFail = false; + } + } + else if (standardOpening.PlRHRVector.X == 0 && standardOpening.PlRHRVector.Y == 0 && Math.Abs(standardOpening.PlRHRVector.Z) == 1) + { + List coordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in standardOpening.PlCoords) + { + //only take the X and Y coordinates and throw out the Z because we can assume that they are all the same + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, coord.Y, 0); + coordList.Add(c2); + + } + double area = Math.Abs(GetAreaFrom2DPolyLoop(coordList)); + standardOpening.surfaceArea = area; + if (area == -999) + { + report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); + + } + double testOpeningArea = 0; + + if (testOpening.PlRHRVector.X == 0 && testOpening.PlRHRVector.Y == 0 && + Math.Abs(testOpening.PlRHRVector.Z) == 1) + { + List testCoordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in testOpening.PlCoords) + { + Vector.MemorySafe_CartCoord c02 = new Vector.MemorySafe_CartCoord(coord.X, coord.Y, 0); + testCoordList.Add(coord); + } + testOpeningArea = Math.Abs(GetAreaFrom2DPolyLoop(testCoordList)); + testOpening.surfaceArea = testOpeningArea; + if (testOpeningArea == -999) + { + //these messages should never occur and are a sign of some sort of serious, as of yet unknown error + //March 20 2013 + report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + report.longMsg = "Fatal error. Please contact gbXML administrator"; + report.passOrFail = false; + return report; + } + double difference = Math.Abs(area) - Math.Abs(testOpeningArea); + if (difference < Math.Abs(area) * DOEgbXMLBasics.Tolerances.OpeningAreaPercentageTolerance) + { + + if (difference == 0) + { + //then it perfectly matches, go on to check the poly loop coordinates + //then check the insertion point + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " polyloop surface area matches the polyLoop surface area of the standard Opening: " + standardOpening.OpeningId + " exactly."); + possibleMatches.Add(testOpening); + } + else + { + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " polyloop surface area matches the polyLoop surface area of the standard Opening: " + standardOpening.OpeningId + " within the allowable area percentage tolerance."); + possibleMatches.Add(testOpening); + } + } + else + { + report.MessageList.Add("The standard file opening cannot find a match for its surface area of Opening: " + standardOpening.OpeningId + " through a comparison of its polyloop coordinates with test Opening: " + testOpening.OpeningId); + //don't return here, it will be returned below + } + } + else + { + //by definition, the Window opening should always use coordinates that create a normal vector that points in the + //positive or negative X direction. 
If the test file does not do this, then this is in violation of the + //gbXML spec + report.longMsg = ("This test has failed because the test opening" + testOpening.OpeningId + "has polyloop coordinates "); + report.longMsg += (" that do not have the same normal vector as the standard opening."); + report.passOrFail = false; + } + + } + //the opening is not aligned along a reference frame axis + else + { + report.MessageList.Add("This standard Opening is not aligned along a reference plane axis, and will be rotated into a new coordinate frame."); + report.MessageList.Add("Commencing rotation to 2-D."); + //New Z Axis for this plane is the normal vector, does not need to be created + //Get New Y Axis which is the surface Normal Vector cross the original global reference X unit vector (all unit vectors please + + Vector.CartVect globalReferenceX = new Vector.CartVect(); + globalReferenceX.X = 1; + globalReferenceX.Y = 0; + globalReferenceX.Z = 0; + Vector.MemorySafe_CartVect localY = Vector.UnitVector(Vector.CrossProductMSRetMSNV(standardOpening.PlRHRVector, globalReferenceX)); + localY = Vector.UnitVector(localY); + + //new X axis is the localY cross the surface normal vector + Vector.MemorySafe_CartVect localX = Vector.UnitVector(Vector.CrossProduct(localY, standardOpening.PlRHRVector)); + + //convert the polyloop coordinates to a local 2-D reference frame + //using a trick employed by video game programmers found here http://stackoverflow.com/questions/1023948/rotate-normal-vector-onto-axis-plane + List translatedCoordinates = new List(); + Vector.MemorySafe_CartCoord newOrigin = new Vector.MemorySafe_CartCoord(0,0,0); + translatedCoordinates.Add(newOrigin); + for (int j = 1; j < standardOpening.PlCoords.Count; j++) + { + //randomly assigns the first polyLoop coordinate as the origin + Vector.MemorySafe_CartCoord origin = standardOpening.PlCoords[0]; + //captures the components of a vector drawn from the new origin to the + Vector.CartVect distance = new Vector.CartVect(); + distance.X = standardOpening.PlCoords[j].X - origin.X; + distance.Y = standardOpening.PlCoords[j].Y - origin.Y; + distance.Z = standardOpening.PlCoords[j].Z - origin.Z; + //x coordinate is distance vector dot the new local X axis + double tX = distance.X * localX.X + distance.Y * localX.Y + distance.Z * localX.Z; + //y coordinate is distance vector dot the new local Y axis + double tY = distance.X * localY.X + distance.Y * localY.Y + distance.Z * localY.Z; + double tZ = 0; + Vector.MemorySafe_CartCoord translatedPt = new Vector.MemorySafe_CartCoord(tX,tY,tZ); + translatedCoordinates.Add(translatedPt); + + } + double area = GetAreaFrom2DPolyLoop(translatedCoordinates); + standardOpening.surfaceArea = area; + if (area == -999) + { + report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); + + } + //get the area of the test candidates using the polyloop coordinates + Vector.CartVect testglobalReferenceX = new Vector.CartVect(); + globalReferenceX.X = 1; + globalReferenceX.Y = 0; + globalReferenceX.Z = 0; + Vector.MemorySafe_CartVect testlocalY = Vector.UnitVector(Vector.CrossProductMSRetMSNV(testOpening.PlRHRVector, testglobalReferenceX)); + + //new X axis is the localY cross the surface normal vector + Vector.MemorySafe_CartVect testlocalX = Vector.UnitVector(Vector.CrossProduct(testlocalY, 
testOpening.PlRHRVector)); + + //convert the polyloop coordinates to a local 2-D reference frame + //using a trick employed by video game programmers found here http://stackoverflow.com/questions/1023948/rotate-normal-vector-onto-axis-plane + List testtranslatedCoordinates = new List(); + Vector.MemorySafe_CartCoord newOriginTest = new Vector.MemorySafe_CartCoord(0,0,0); + testtranslatedCoordinates.Add(newOriginTest); + for (int j = 1; j < testOpening.PlCoords.Count; j++) + { + //randomly assigns the first polyLoop coordinate as the origin + Vector.MemorySafe_CartCoord origin = testOpening.PlCoords[0]; + //captures the components of a vector drawn from the new origin to the + Vector.CartVect distance = new Vector.CartVect(); + distance.X = testOpening.PlCoords[j].X - origin.X; + distance.Y = testOpening.PlCoords[j].Y - origin.Y; + distance.Z = testOpening.PlCoords[j].Z - origin.Z; + + //x coordinate is distance vector dot the new local X axis + double tX = distance.X * localX.X + distance.Y * localX.Y + distance.Z * localX.Z; + //y coordinate is distance vector dot the new local Y axis + double tY = distance.X * localY.X + distance.Y * localY.Y + distance.Z * localY.Z; + double tZ = 0; + Vector.MemorySafe_CartCoord translatedPt = new Vector.MemorySafe_CartCoord(tX,tY,tZ); + testtranslatedCoordinates.Add(translatedPt); + + } + double testOpeningArea = GetAreaFrom2DPolyLoop(translatedCoordinates); + testOpening.surfaceArea = testOpeningArea; + if (testOpeningArea == -999) + { + report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + } + double difference = Math.Abs(area) - Math.Abs(testOpeningArea); + if (difference < Math.Abs(area) * DOEgbXMLBasics.Tolerances.OpeningAreaPercentageTolerance) + { + + if (difference == 0) + { + //then it perfectly matches, go on to check the poly loop coordinates + //then check the insertion point + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " polyloop surface area matches the polyLoop surface area of the standard Opening: " + standardOpening.OpeningId + " exactly."); + possibleMatches.Add(testOpening); + } + else + { + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " polyloop surface area matches the polyLoop surface area of the standard Opening: " + standardOpening.OpeningId + " within the allowable area percentage tolerance."); + possibleMatches.Add(testOpening); + } + } + else + { + report.MessageList.Add("The standard file opening cannot find a match for its surface area of Opening: " + standardOpening.OpeningId + " through a comparison of its polyloop coordinates with test Opening: " + testOpening.OpeningId); + //don't return here, it will be returned below + } + + } + i++; + if (i == possibleMatches2.Count) + { + if (possibleMatches.Count == 0) + { + report.MessageList.Add("No area match could be found for standard opening: " + standardOpening.OpeningId + "."); + report.longMsg = "The search routine has ended and could not find a match for opening: " + standardOpening.OpeningId + + ". 
Attempt to match the area of the standard file with test file openings failed."; + return report; + + } + else + { + //you are good to go with some more matches + report.MessageList.Add("Area matching SUCCESS for standard file Opening id: " + standardOpening.OpeningId); + report.MessageList.Add("Commencing comparisons of height, width, and insertion point."); + break; + } + } + + #endregion + } + //test the width and height, if applicable + report.MessageList.Add("
"); + report.MessageList.Add("Starting Width and Height Match test........."); + possibleMatches2.Clear(); + i = 0; + //surface area using the coordinates of the polyloop. We already assume that they are planar, as previously tested + while (true) + { + //see if the openings are regular + bool isStandardRegular = IsOpeningRegular(standardOpening); + bool isTestRegular = IsOpeningRegular(possibleMatches[i]); + //if they are...go ahead and use width and height, otherwise the values are not reliable + if (isStandardRegular) + { + //output something + if (isTestRegular) + { + //output something + //perform tests + + OpeningDefinitions testOpening = possibleMatches[i]; + double testWidth = testOpening.Width; + double standardWidth = standardOpening.Width; + double testHeight = testOpening.Height; + double standardHeight = standardOpening.Height; + double widthDifference = Math.Abs(testWidth - standardWidth); + double heightDiffefence = Math.Abs(testHeight - standardHeight); + + if (widthDifference <= DOEgbXMLBasics.Tolerances.OpeningWidthTolerance) + { + if (widthDifference == 0) + { + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " reported Width value matches the Width value of the standard Opening: " + standardOpening.OpeningId + " exactly."); + } + else + { + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " reported Width value matches the Width value of the standard Opening: " + standardOpening.OpeningId + " within the allowable tolerance."); + } + //check the height + if (heightDiffefence <= DOEgbXMLBasics.Tolerances.OpeningHeightTolerance) + { + if (heightDiffefence == 0) + { + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " reported Height value matches the Height value of the standard Opening: " + standardOpening.OpeningId + " exactly."); + possibleMatches2.Add(testOpening); + } + else + { + report.MessageList.Add("The test Opening: " + testOpening.OpeningId + " reported Height value matches the Height value of the standard Opening: " + standardOpening.OpeningId + " within the allowable tolerance."); + possibleMatches2.Add(testOpening); + } + } + else + { + //fail, did not match height + report.MessageList.Add("The standard file Opening: " + standardOpening.OpeningId + "The standard file opening cannot find a match for its surface area of Opening: " + standardOpening.OpeningId + " after comparison its Height value with test opening: " + testOpening.OpeningId); + report.passOrFail = false; + continue; + } + } + else + { + //failed, did not match width + report.MessageList.Add("The standard file Opening: " + standardOpening.OpeningId + " cannot find a match for its width after comparison the width value of test Opening: " + testOpening.OpeningId); + report.passOrFail = false; + continue; + } + } + else + { + //let them know the the test opening is not a square or rectangle, but the standard file opening is + //go ahead and break out of the while loop because we aren't testing for width and height + report.MessageList.Add("The standard file Opening: " + standardOpening.OpeningId + " is a rectangle or square, but the test file Opening: " + standardOpening.OpeningId + " is not. 
Cannot test for a valid width and height."); + report.MessageList.Add("Searching for another test Opening."); + continue; + } + } + else + { + //tell them that the widths and Heights will Not be checked + //because the standard file opening is not a square or rectangle + report.MessageList.Add("Will not be testing for the Width and Height values for standard Opening: " + standardOpening.OpeningId + ". The Opening is not shaped like a rectangle or square."); + report.MessageList.Add("Going on to check insertion point accuracy."); + //needed to transfer values over to possibleMatches2, so deep copy + possibleMatches2 = new List(possibleMatches); + break; + } + i++; + if (possibleMatches.Count == i) + { + //means that there is no match for width and height + if (possibleMatches2.Count == 0) + { + report.MessageList.Add("There is no match found for the width and height for Opening: " + standardOpening.OpeningId); + report.passOrFail = false; + report.longMsg = "The opening test has ended at the search for width and height values equal to standard Opening: " + standardOpening.OpeningId; + return report; + } + break; + } + + } + report.MessageList.Add("
"); + report.MessageList.Add("Starting Insertion Point Coordinate Match test........."); + possibleMatches.Clear(); + //test the insertion point coordinates + i = 0; + while (true) + { + OpeningDefinitions testOpening = possibleMatches2[i]; + double diffX = Math.Abs(testOpening.InsertionPoint.X - standardOpening.InsertionPoint.X); + double diffY = Math.Abs(testOpening.InsertionPoint.Y - standardOpening.InsertionPoint.Y); + double diffZ = Math.Abs(testOpening.InsertionPoint.Z - standardOpening.InsertionPoint.Z); + + if (diffX <= DOEgbXMLBasics.Tolerances.OpeningSurfaceInsPtXTolerance && diffY <= DOEgbXMLBasics.Tolerances.OpeningSurfaceInsPtYTolerance && + diffZ <= DOEgbXMLBasics.Tolerances.OpeningSurfaceInsPtZTolerance) + { + if (diffX == 0) + { + //perfect X coordinate match + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a perfect match for its insertion point X-Coordinate when compared with test Opening: " + testOpening.OpeningId); + if (diffY == 0) + { + //perfect Y coordinate match + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a perfect match for its insertion point Y-Coordinate when compared with test Opening: " + testOpening.OpeningId); + if (diffZ == 0) + { + //perfect Z coordinate match + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a perfect match for its insertion point Z-Coordinate when compared with test Opening: " + testOpening.OpeningId); + possibleMatches.Add(testOpening); + + } + else + { + // Z coordinate match + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a match within allowable tolerances for its insertion point Z-Coordinate when compared with Test opening: " + testOpening.OpeningId); + //we continue because we search for other matches if there are any + possibleMatches.Add(testOpening); + + } + } + else + { + //y-coordinate is within tolerance + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a match within allowable tolerances for its insertion point Y-Coordinate when compared with Test opening: " + testOpening.OpeningId); + if (diffZ == 0) + { + //perfect Z coordinate match + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a perfect match for its insertion point Z-Coordinate when compared with Test opening: " + testOpening.OpeningId); + possibleMatches.Add(testOpening); + + } + else + { + //perfect Z coordinate match + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a match within allowable tolerances for its insertion point Z-Coordinate when compared with test Opening: " + testOpening.OpeningId); + //we continue because we search for other matches if there are any + possibleMatches.Add(testOpening); + + } + } + + } + // X is within tolerance + else + { + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a match within allowable tolerances for its insertion point X-Coordinate when compared with test Opening: " + testOpening.OpeningId); + if (diffY == 0) + { + //perfect Y coordinate match + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a perfect match for its insertion point Y-Coordinate when compared with test Opening: " + testOpening.OpeningId); + if (diffZ == 0) + { + //perfect Z coordinate match + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a perfect match for its insertion 
point Z-Coordinate when compared with test Opening: " + testOpening.OpeningId); + possibleMatches.Add(testOpening); + + } + else + { + //perfect Z coordinate match + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a match within allowable tolerances for its insertion point Z-Coordinate when compared with test Opening: " + testOpening.OpeningId); + //we continue because we search for other matches if there are any + possibleMatches.Add(testOpening); + + } + } + else + { + //y-coordinate is within tolerance + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a match within allowable tolerances for its insertion point Y-Coordinate when compared with test Opening: " + testOpening.OpeningId); + if (diffZ == 0) + { + //perfect Z coordinate match + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a perfect match for its insertion point Z-Coordinate when compared with test Opening: " + testOpening.OpeningId); + possibleMatches.Add(testOpening); + + } + else + { + //perfect Z coordinate match + report.MessageList.Add("Standard Opening: " + standardOpening.OpeningId + " has found a match within allowable tolerances for its insertion point Z-Coordinate when compared with test Opening: " + testOpening.OpeningId); + //we continue because we search for other matches if there are any + possibleMatches.Add(testOpening); + + } + } + } + } + report.MessageList.Add("Standard Opening Ins Pt: (" + standardOpening.InsertionPoint.X.ToString() + "," + standardOpening.InsertionPoint.Y.ToString() + "," + standardOpening.InsertionPoint.Z.ToString() + ")"); + report.MessageList.Add("Test File Opening Ins Pt: (" + testOpening.InsertionPoint.X.ToString() + "," + testOpening.InsertionPoint.Y.ToString() + "," + testOpening.InsertionPoint.Z.ToString() + ")"); + i++; + if (possibleMatches2.Count == i) + { + if (possibleMatches.Count == 1) + { + List openingMatch = new List(); + openingMatch.Add(possibleMatches[0].OpeningId); + report.MessageList.Add("Standard file Opening: " + standardOpening.OpeningId + " is matched to test file Opening: " + testOpening.OpeningId); + globalMatchObject.MatchedOpeningIds.Add(standardOpening.OpeningId, openingMatch); + report.passOrFail = true; + return report; + } + else + { + if (possibleMatches.Count == 0) + { + report.MessageList.Add("Standard file Opening: " + standardOpening.OpeningId + " found no match for insertion point in the test file of the remaining candidates."); + report.passOrFail = false; + return report; + } + else + { + report.MessageList.Add("Standard file Opening: " + standardOpening.OpeningId + " is matched to multiple openings:"); + foreach (OpeningDefinitions opening in possibleMatches) + { + report.MessageList.Add("Test Opening:" + opening.OpeningId + "matched insertion point"); + } + //resolve by trying to match to the standard opening and test opening parent surfaces. + //for the standard opening + if (globalMatchObject.MatchedSurfaceIds.ContainsKey(standardOpening.ParentSurfaceId)) + { + List possibleSurfaceMatches = globalMatchObject.MatchedSurfaceIds[standardOpening.ParentSurfaceId]; + if (possibleSurfaceMatches.Count == 1) + { + //then a match was found originally during get possible surface matches. 
That is good, we only want one + foreach (OpeningDefinitions openingRemaining in possibleMatches) + { + if (openingRemaining.ParentSurfaceId == possibleSurfaceMatches[0]) + { + //this is the match we want + //else we would have to continue + report.MessageList.Add("The test Opening: " + openingRemaining.OpeningId + " has been matched to the standard Opening: " + standardOpening.OpeningId + + ". Their parent surface ids have been matched. Thus the conflict has been resolved. (Standard opening parent surface Id, test opening parent surface Id" + standardOpening.ParentSurfaceId + "," + openingRemaining.ParentSurfaceId); + report.passOrFail = true; + List openingMatch = new List(); + openingMatch.Add(possibleMatches[0].OpeningId); + globalMatchObject.MatchedOpeningIds.Add(standardOpening.OpeningId, openingMatch); + return report; + } + else + { + //do nothing. Maybe report that the parent Surface Id does not match the standard Opening + report.MessageList.Add("Test Opening:" + openingRemaining.OpeningId + " does not match the standard Opening: " + standardOpening.OpeningId + + ". Their parent surface ids do not coincide. (Standard Opening parent surface id, test Opening parent surface id)" + standardOpening.ParentSurfaceId + "," + openingRemaining.ParentSurfaceId); + } + } + } + } + report.passOrFail = false; + return report; + } + } + } + + } + + //finished + + } + catch (Exception e) + { + report.longMsg = e.ToString(); + } + return report; + } + + private List GetFileOpeningDefs(XmlDocument TestFile, XmlNamespaceManager TestNSM) + { + List openings = new List(); + try + { + + XmlNodeList nodes = TestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface/gbXMLv5:Opening", TestNSM); + foreach (XmlNode openingNode in nodes) + { + //initialize a new instance of the class + OpeningDefinitions openingDef = new OpeningDefinitions(); + + openingDef.PlCoords = new List(); + + //get parent id + XmlAttributeCollection parentSurfaceAttributes = openingNode.ParentNode.Attributes; + foreach (XmlAttribute parentAt in parentSurfaceAttributes) + { + if (parentAt.Name == "id") + { + openingDef.ParentSurfaceId = parentAt.Value; + break; + } + } + //get Parent Azimuth and Tilt + XmlNode surfaceParentNode = openingNode.ParentNode; + if (surfaceParentNode.HasChildNodes) + { + XmlNodeList surfaceParentNodesChillun = surfaceParentNode.ChildNodes; + foreach (XmlNode chileNode in surfaceParentNodesChillun) + { + if (chileNode.Name == "RectangularGeometry") + { + if (chileNode.HasChildNodes) + { + foreach (XmlNode grandchileNode in chileNode) + { + if (grandchileNode.Name == "Tilt") { openingDef.ParentTilt = Convert.ToDouble(grandchileNode.InnerText); } + else if (grandchileNode.Name == "Azimuth") { openingDef.ParentAzimuth = Convert.ToDouble(grandchileNode.InnerText); } + } + } + } + } + } + else + { + + } + //get surface Id and Opening Type + XmlAttributeCollection openingAtts = openingNode.Attributes; + foreach (XmlAttribute at in openingAtts) + { + if (at.Name == "id") + { + openingDef.OpeningId = at.Value; + } + else if (at.Name == "openingType") + { + openingDef.OpeningType = at.Value; + } + } + if (openingNode.HasChildNodes) + { + XmlNodeList surfChildNodes = openingNode.ChildNodes; + foreach (XmlNode node in surfChildNodes) + { + + if (node.Name == "RectangularGeometry") + { + if (node.HasChildNodes) + { + XmlNodeList rectGeomChildren = node.ChildNodes; + foreach (XmlNode rgChildNode in rectGeomChildren) + { + if (rgChildNode.Name == "Azimuth") { openingDef.Azimuth = 
Convert.ToDouble(rgChildNode.InnerText); } + else if (rgChildNode.Name == "CartesianPoint") + { + if (rgChildNode.HasChildNodes) + { + XmlNodeList coordinates = rgChildNode.ChildNodes; + int pointCount = 1; + Vector.CartCoord od = new Vector.CartCoord(); + foreach (XmlNode coordinate in coordinates) + { + switch (pointCount) + { + case 1: + od.X = Convert.ToDouble(coordinate.InnerText); + break; + case 2: + od.Y = Convert.ToDouble(coordinate.InnerText); + break; + case 3: + od.Z = Convert.ToDouble(coordinate.InnerText); + break; + } + pointCount++; + } + openingDef.InsertionPoint = new Vector.MemorySafe_CartCoord(od.X, od.Y, od.Z); + } + } + else if (rgChildNode.Name == "Tilt") { openingDef.Tilt = Convert.ToDouble(rgChildNode.InnerText); } + else if (rgChildNode.Name == "Height") { openingDef.Height = Convert.ToDouble(rgChildNode.InnerText); } + else if (rgChildNode.Name == "Width") { openingDef.Width = Convert.ToDouble(rgChildNode.InnerText); } + } + } + } + else if (node.Name == "PlanarGeometry") + { + XmlNode polyLoop = node.FirstChild; + if (polyLoop.HasChildNodes) + { + XmlNodeList cartesianPoints = polyLoop.ChildNodes; + foreach (XmlNode coordinatePt in cartesianPoints) + { + Vector.CartCoord coord = new Vector.CartCoord(); + if (coordinatePt.HasChildNodes) + { + XmlNodeList coordinates = coordinatePt.ChildNodes; + int pointCount = 1; + foreach (XmlNode coordinate in coordinatePt) + { + + switch (pointCount) + { + case 1: + coord.X = Convert.ToDouble(coordinate.InnerText); + break; + case 2: + coord.Y = Convert.ToDouble(coordinate.InnerText); + break; + case 3: + coord.Z = Convert.ToDouble(coordinate.InnerText); + break; + } + pointCount++; + } + openingDef.PlCoords.Add(new Vector.MemorySafe_CartCoord(coord.X,coord.Y,coord.Z)); + } + } + } + } + } + } + Vector.MemorySafe_CartVect plRHRVect = GetPLRHR(openingDef.PlCoords); + openingDef.PlRHRVector = new Vector.MemorySafe_CartVect(plRHRVect.X, plRHRVect.Y, plRHRVect.Z); + //may want to forego the above since the orientation is embedded in the parent object. It may be smarter to just include the azimuth and tilt of the parent object? 
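+ //Note: PlRHRVector is still consumed downstream by GetPossibleOpeningMatches, which uses it to choose the 2-D projection plane for the opening area comparison and to rotate non-axis-aligned openings into a local coordinate frame, so it is retained here alongside ParentAzimuth and ParentTilt.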
+ openings.Add(openingDef); + } + + return openings; + } + catch (Exception e) + { + return openings; + } + } + + private DOEgbXMLReportingObj TestSurfacePlanarTest(List TestSurfaces, DOEgbXMLReportingObj report) + { + //ensure that each set of RHR tests result in parallel or anti-parallel resultant vectors, or else fail the test + + foreach (SurfaceDefinitions ts in TestSurfaces) + { + Dictionary> surfaceXProducts = new Dictionary>(); + List xProducts = new List(); + for (int i = 0; i < ts.PlCoords.Count - 2; i++) + { + //Get the Cross Product + VectorMath.Vector.CartVect v1 = VectorMath.Vector.CreateVector(ts.PlCoords[i], ts.PlCoords[i + 1]); + VectorMath.Vector.CartVect v2 = VectorMath.Vector.CreateVector(ts.PlCoords[i + 1], ts.PlCoords[i + 2]); + Vector.CartVect xProd = Vector.CrossProduct(v1, v2); + xProd = Vector.UnitVector(xProd); + xProducts.Add(xProd); + } + surfaceXProducts.Add(ts.SurfaceId, xProducts); + for (int j = 0; j < xProducts.Count - 1; j++) + { + //parallel and anti parallel + if (xProducts[j].X == xProducts[j + 1].X && xProducts[j].Y == xProducts[j + 1].Y && xProducts[j].Z == xProducts[j + 1].Z) + { + continue; + } + //anti-parallel + else if (xProducts[j].X == -1 * xProducts[j + 1].X && xProducts[j].Y == -1 * xProducts[j + 1].Y && xProducts[j].Z == -1 * xProducts[j + 1].Z) + { + continue; + } + else if (Math.Abs(xProducts[j].X) - Math.Abs(xProducts[j + 1].X) < .0001 && Math.Abs(xProducts[j].Y) - Math.Abs(xProducts[j + 1].Y) < .0001 && + Math.Abs(xProducts[j].Z) - Math.Abs(xProducts[j + 1].Z) < 0.0001) + { + continue; + } + else + { + report.MessageList.Add("Test file's Surface, id: " + ts.SurfaceId + " has polyLoop coordinates that do not form a planar surface. This fails the detailed surface tests and will not continue."); + report.passOrFail = false; + report.longMsg = "Detailed surface test failed during the planar surface checks. Without planar surfaces, this test cannot be safely executed."; + return report; + } + } + } + report.MessageList.Add("All test file's surfaces have polyloop descriptions that describe a planar surface. 
Planar surface test succeeded."); + report.passOrFail = true; + return report; + + } + + private bool TestSurfacePlanarTest(List TestSurfaces, ref CampusReport cr) + { + //ensure that each set of RHR tests result in parallel or anti-parallel resultant vectors, or else fail the test + + foreach (SurfaceDefinitions ts in TestSurfaces) + { + Dictionary> surfaceXProducts = new Dictionary>(); + List xProducts = new List(); + for (int i = 0; i < ts.PlCoords.Count - 2; i++) + { + //Get the Cross Product + VectorMath.Vector.CartVect v1 = VectorMath.Vector.CreateVector(ts.PlCoords[i], ts.PlCoords[i + 1]); + VectorMath.Vector.CartVect v2 = VectorMath.Vector.CreateVector(ts.PlCoords[i + 1], ts.PlCoords[i + 2]); + Vector.CartVect xProd = Vector.CrossProduct(v1, v2); + xProd = Vector.UnitVector(xProd); + xProducts.Add(xProd); + } + surfaceXProducts.Add(ts.SurfaceId, xProducts); + for (int j = 0; j < xProducts.Count - 1; j++) + { + //parallel and anti parallel + if (xProducts[j].X == xProducts[j + 1].X && xProducts[j].Y == xProducts[j + 1].Y && xProducts[j].Z == xProducts[j + 1].Z) + { + continue; + } + //anti-parallel + else if (xProducts[j].X == -1 * xProducts[j + 1].X && xProducts[j].Y == -1 * xProducts[j + 1].Y && xProducts[j].Z == -1 * xProducts[j + 1].Z) + { + continue; + } + else if (Math.Abs(xProducts[j].X) - Math.Abs(xProducts[j + 1].X) < .0001 && Math.Abs(xProducts[j].Y) - Math.Abs(xProducts[j + 1].Y) < .0001 && + Math.Abs(xProducts[j].Z) - Math.Abs(xProducts[j + 1].Z) < 0.0001) + { + continue; + } + else + { + logger.Info("TEST FILE FAILURE: " + ts.SurfaceId + " has polyLoop coordinates that do not form a planar surface. This fails the detailed surface tests and will not continue."); + cr.SurfacesSummary.Find(x => x.FileType == "Test").SurfacesArePlanar = false; + return false; + } + } + } + logger.Info("TEST FILE SUCCESS: All test file's surfaces have polyloop descriptions that describe a planar surface. Planar surface test succeeded."); + return true; + + } + + private DOEgbXMLReportingObj TestOpeningPlanarTest(List TestOpenings, DOEgbXMLReportingObj report) + { + //ensure that each set of RHR tests result in parallel or anti-parallel resultant vectors, or else fail the test + + foreach (OpeningDefinitions to in TestOpenings) + { + Dictionary> surfaceXProducts = new Dictionary>(); + List xProducts = new List(); + for (int i = 0; i < to.PlCoords.Count - 2; i++) + { + //Get the Cross Product + VectorMath.Vector.CartVect v1 = VectorMath.Vector.CreateVector(to.PlCoords[i], to.PlCoords[i + 1]); + VectorMath.Vector.CartVect v2 = VectorMath.Vector.CreateVector(to.PlCoords[i + 1], to.PlCoords[i + 2]); + Vector.CartVect xProd = Vector.CrossProduct(v1, v2); + xProd = Vector.UnitVector(xProd); + xProducts.Add(xProd); + } + surfaceXProducts.Add(to.OpeningId, xProducts); + for (int j = 0; j < xProducts.Count - 1; j++) + { + //parallel + if (xProducts[j].X == xProducts[j + 1].X && xProducts[j].Y == xProducts[j + 1].Y && xProducts[j].Z == xProducts[j + 1].Z) + { + continue; + } + //anti-parallel + else if (xProducts[j].X == -1 * xProducts[j + 1].X && xProducts[j].Y == -1 * xProducts[j + 1].Y && xProducts[j].Z == -1 * xProducts[j + 1].Z) + { + continue; + } + else + { + report.MessageList.Add("Test file's Opening, id: " + to.OpeningId + + " has polyLoop coordinates that do not form a planar surface. This fails the detailed surface tests and will not continue."); + report.passOrFail = false; + report.longMsg = "Detailed opening test failed during the planar surface checks. 
Without planar polygons, this test cannot be safely executed."; + return report; + } + } + } + report.MessageList.Add("All test file's openings have polyloop descriptions that describe a planar polygon. Planar opening test succeeded."); + report.passOrFail = true; + return report; + + } + + + //this method relies on an absolute difference for tolerance tests. + public static DOEgbXMLReportingObj GetBuildingArea(List<XmlDocument> gbXMLDocs, List<XmlNamespaceManager> gbXMLnsm, DOEgbXMLReportingObj report, Conversions.areaUnitEnum standardUnits, Conversions.areaUnitEnum testUnits, double testareaConversion, double standardareaConversion) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary = "This test compares the values stored in the Building Area node of the standard and test gbXML files."; + report.testSummary += " This Building Area is the sum total of the areas of all spaces created in gbXML."; + report.testSummary += " For example, if a small building has five spaces of area = 100 square feet each, then the sum of that area is"; + report.testSummary += " 5 x 100 = 500 square feet. The building area value would be 500 square feet."; + report.testSummary += " We have built a tolerance into this test, meaning the building areas do not need to match perfectly in the"; + report.testSummary += " standard file and test file. As long as your test file's value for Building Area is within +/- this tolerance, the"; + report.testSummary += " test will pass. Using the previous example, if the allowable tolerance is 1% (1% of 500 is 5 sf), then the test file may have a building area ranging from 495 to 505 square feet, and will still be declared to pass this test."; + + report.unit = standardUnits.ToString(); + string testUOM = testUnits.ToString(); + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + var node = gbXMLDocs[i].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Area", gbXMLnsm[i]); + string area = node.InnerText; + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = area; + if (i % 2 != 0) + { + //setup standard result and test result + if (testareaConversion != 1) { report.MessageList.Add("Converted the test file from " + testUOM + " to " + report.unit + "."); } + //apply the conversion factor on the test file always, regardless. 
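+ //gbXMLDocs alternates test (even index) and standard (odd index) documents, so on an odd pass resultsArray[i] holds the standard file's Building Area and resultsArray[i - 1] holds the test file's; each value is normalized by its own unit conversion factor before the absolute difference is compared against report.tolerance.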
+ double standardArea = Convert.ToDouble(resultsArray[i]) * standardareaConversion; + double testArea = Convert.ToDouble(resultsArray[(i - 1)]) * testareaConversion; + report.standResult.Add(String.Format("{0:#,0.00}", standardArea.ToString())); + report.testResult.Add(String.Format("{0:#,0.00}", testArea.ToString())); + report.idList.Add(""); + + + double difference = standardArea - testArea; + if (Math.Abs(difference) == 0) + { + report.longMsg = "The test file's " + report.testType + "matches the standard file Building Area exactly."; + report.passOrFail = true; + return report; + } + + else if (Math.Abs(difference) <= report.tolerance) + { + report.longMsg = "The test file's " + report.testType + " is within the allowable tolerance of = " + report.tolerance.ToString() + " " + report.unit; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The test file's " + report.testType + " is not within the allowable tolerance of " + report.tolerance.ToString() + " " + report.unit + "The difference between the standard and test file is " + difference.ToString() + "."; + report.passOrFail = false; + return report; + } + } + else { continue; } + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + //Created July 2016, Chien Si Harriman + //note we made a change to this method where the tolerance is now based on a percentage to the standard, and is not absolute. + public static bool GetBuildingArea(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr, Conversions.areaUnitEnum standardUnits, Conversions.areaUnitEnum testUnits, double testareaConversion, double standardareaConversion, double tolerance, DOEgbXMLTestDetail t) + { + + //assuming that this will be plenty large for now, all test cases only have one building currently. + string[] resultsArray = new string[50]; + bool thinWalled = false; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + try + { + XmlNode productName = gbXMLDocs[0].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:DocumentHistory/gbXMLv5:ProgramInfo/gbXMLv5:ProductName", gbXMLnsm[i]); + if (productName.InnerText.ToLower().Replace(" ",String.Empty).Trim() == "openstudio") //TODO: Consider a different test. + { + thinWalled = true; + } + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + var node = gbXMLDocs[i].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Area", gbXMLnsm[i]); + string area = node.InnerText; + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = area; + if (i % 2 != 0) + { + //setup standard result and test result + if (testareaConversion != 1) { logger.Info("PROGRAMMER'S NOTE: Converted the test file building area units."); } + //apply the conversion factor on the test file always, regardless. + double standardArea = Convert.ToDouble(resultsArray[i]) * standardareaConversion; + double testArea = Convert.ToDouble(resultsArray[(i - 1)]) * testareaConversion; + if (thinWalled) + { + //no conversion necessary, it was already accounted for when it was entered by the administrator. 
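+ //For thin-walled exports (detected above by an OpenStudio ProductName), the test area is compared against the expected thin-walled building area and, when it falls within tolerance, replaced with the alternate value so the comparison against the standard file below is made on an equivalent basis; otherwise the test fails immediately.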
+ if (Math.Abs(testArea - t.thinWalledExpectedBuildingArea)/t.thinWalledAltBuildingArea < tolerance) { testArea = t.thinWalledAltBuildingArea; } + else + { + logger.Info("TEST FILE FAILURE: The test file's Building Area is not within the allowable tolerance of " + tolerance.ToString() + "."); + return false; + } + } + + logger.Info("Standard Building Area: "+String.Format("{0:#,0.00}", standardArea.ToString())); + logger.Info("Test Building Area: "+String.Format("{0:#,0.00}", testArea.ToString())); + + cr.BuildingSummary.Find(x => x.FileType == "Standard").BuildingArea = new Area(standardArea,"Square Feet"); + cr.BuildingSummary.Find(x => x.FileType == "Test").BuildingArea = new Area(testArea, "Square Feet"); + + double difference = Math.Abs(standardArea - testArea)/standardArea; + if (difference == 0) + { + logger.Info("TEST FILE SUCCESS:PERFECT: The test file's Building Area matches the standard file Building Area exactly."); + } + + else if (difference <= tolerance) + { + logger.Info("TEST FILE SUCCESS: The test file's Building Area is within the allowable tolerance of = " + tolerance.ToString()); + } + else + { + logger.Info("TEST FILE FAILURE: The test file's Building Area is not within the allowable tolerance of " + tolerance.ToString() + "."); + return false; + } + } + else { continue; } + } + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal(" Failed to locate Building Area in the XML file."); + return false; + } + } + + return true; + } + + + public static DOEgbXMLReportingObj GetBuildingSpaceCount(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary = "This test compares the number of spaces (it counts them) in the standard and test files. It does this"; + report.testSummary = " by counting the number of occurrences of the Space element in the gbXML files. The number of spaces should"; + report.testSummary = " match exactly. If you test has failed, this is because it is required that the space count match. 
If the number"; + report.testSummary = " of spaces does not match, there could be a number of reasons for this, but most likely, the test file has"; + report.testSummary = " not been constructed as per the instructions provided by the gbXML Test Case Manual."; + + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space", gbXMLns); + int nodecount = nodes.Count; + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + report.longMsg = " The test file's " + report.testType + " matches the standard file exactly."; + report.passOrFail = true; + return report; + } + else if (difference <= report.tolerance) + { + report.longMsg = " The test file's " + report.testType + " matches the standard file " + report.testType + ", the difference was within tolerance = " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The test file's " + report.testType + " is not within the allowable tolerance of " + report.tolerance.ToString() + " " + Units + " The difference between the standard and test file is " + difference.ToString() + " " + Units; + report.passOrFail = false; + return report; + } + } + else { continue; } + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate Building " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + public static bool GetBuildingSpaceCount(List gbXMLDocs, List gbXMLnsm, string Units, int spaceCountTol, ref CampusReport cr) + { + + //TODO: This could be improved. It will fail when the space count exceeds 500. 
ssuming that this will be plenty large for now + int[] resultsArray = new int[500]; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space", gbXMLns); + int nodecount = nodes.Count; + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount; + if (i % 2 != 0) + { + //setup standard result and test result + logger.Debug("There are " + resultsArray[i]+" Spaces in the standard file"); + logger.Debug("There are " + resultsArray[i-1] + " Spaces in the uploaded test file"); + + + cr.SpacesSummary.Find(x => x.FileType == "Standard").Count = resultsArray[i]; //TODO: Remove + cr.SpacesSummary.Find(x => x.FileType == "Test").Count = resultsArray[i-1]; //TODO: Remove + + cr.BuildingSummary.Find(x => x.FileType == "Standard").NumberOfSpaces = resultsArray[i]; + cr.BuildingSummary.Find(x => x.FileType == "Test").NumberOfSpaces = resultsArray[i-1]; + + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + logger.Info("TEST FILE SUCCESS: The test file's Space Count matches the standard file exactly."); + } + else if (difference <= spaceCountTol) + { + logger.Info("TEST FILE SUCCESS: The test file's Space Count matches the standard file, the difference was within tolerance = " + spaceCountTol + " " + Units); + } + else + { + logger.Info("TEST FILE FAILURE: The test file's Space Count is not within the allowable tolerance of " + spaceCountTol + " " + Units + " The difference between the standard and test file is " + difference.ToString() + " " + Units); + return false; + } + } + else { continue; } + } + catch (Exception e) + { + logger.Fatal(" Failed to locate Building in the XML file."); + return false; + } + } + return true; + } + + public static DOEgbXMLReportingObj GetBuildingStoryCount(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary = "This test compares the number of stories (it counts them) in the standard and test files. It does so by"; + report.testSummary += " counting the number of occurances of a Building Storey element in the gbXML files."; + report.testSummary += " The number of stories should match exactly. If your test failed, the number of stories in your file does"; + report.testSummary += " not match the standard file. If the number of stories does not match, "; + report.testSummary += " most likely, the test file has not been constructed as per the instructions provided by the"; + report.testSummary += " gbXML Test Case Manual."; + report.testSummary += " In some instances, it is not required that the number of stories match. If you notice that the number"; + report.testSummary += " of stories do not match, but the test summary showed your file passed, then this is normal. 
Refer to the pass/fail"; + report.testSummary += " summary sheet for more information."; + + report.unit = Units; + + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:BuildingStorey", gbXMLns); + int nodecount = nodes.Count; + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + report.longMsg = " The test file's " + report.testType + " matches the standard file exactly."; + report.passOrFail = true; + return report; + } + else if (difference <= report.tolerance) + { + report.longMsg = " The test file's " + report.testType + " matches the standard file " + report.testType + ", the difference was within tolerance = " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The test file's " + report.testType + " is not within the allowable tolerance of " + report.tolerance.ToString() + " " + Units + " The difference between the standard and test file is " + difference.ToString() + " " + Units; + report.passOrFail = false; + return report; + } + } + else { continue; } + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg += " Failed to locate Building " + report.testType + " in the XML file."; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + return report; + } + + //Created July 2016, Chien Si Harriman + public static bool GetBuildingStoryCount(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr, int tolerance) + { + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:BuildingStorey", gbXMLns); + int nodecount = nodes.Count; + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + logger.Info("TEST FILE SUCCESS:PERFECT : The test file's Building Storey Count matches the standard file exactly."); + } + else if (difference <= tolerance) + { + logger.Info("TEST FILE SUCCESS: The test file's Building Storey Count matches the standard file."); + } + else + { + logger.Info("TEST FILE FAILURE: The test file's Building Storey Count does not match the standard file's ."); + return false; + } + } + else { continue; } + } + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal("Failed to complete building storey count test in the XML file."); + return false; + } + } + return true; + } + + public static 
DOEgbXMLReportingObj GetStoryHeights(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, Conversions.lengthUnitEnum standardLength, Conversions.lengthUnitEnum testLength, double testlengthConversion, double standardlengthConversion) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added March 14 2013 + report.testSummary = "This test compares Z-coordinates in each one of the levels of the standard and test file. It does so by"; + report.testSummary += " gathering the Z-coordinate of a Building Storey element's PolyLoop in the gbXML files."; + report.testSummary += " The z-heights should match exactly. If this test has failed, then one of the z-heights in your file does"; + report.testSummary += " not match the standard file. There is no tolerance for error in this test. If any of the z-heights do not match, "; + report.testSummary += " most likely, the test file has not been constructed as per the instructions provided by the"; + report.testSummary += " gbXML Test Case Manual."; + report.testSummary += " In some instances, it is not required that the z-heights match. If you notice that this test has failed"; + report.testSummary += " but your file overall has still passed, then this is as designed. Refer to the pass/fail"; + report.testSummary += " summary sheet for more information."; + + report.unit = standardLength.ToString(); + string testUOM = testLength.ToString(); + //small dictionaries I make to keep track of the story level names and heights + //standard file + Dictionary standardStoryHeight = new Dictionary(); + //Test File + Dictionary testStoryHeight = new Dictionary(); + string key = null; + string val = null; + string standLevel = ""; + + + for (int i = 0; i < gbXMLDocs.Count; i++) + { + try + { + //assuming that this will be plenty large for now + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:BuildingStorey", gbXMLns); + int nodecount = nodes.Count; + foreach (XmlNode node in nodes) + { + XmlNodeList childNodes = node.ChildNodes; + foreach (XmlNode childNode in childNodes) + { + if (childNode.Name.ToString() == "Level") { key = childNode.InnerText; } + else if (childNode.Name.ToString() == "Name") { val = childNode.InnerText; } + else { continue; } + if (i % 2 != 0) + { + if (key != null && val != null) + { + double testlevelvalue = Convert.ToDouble(val) * testlengthConversion; + testStoryHeight.Add(key, testlevelvalue); + key = null; + val = null; + } + else + { + + } + } + else + { + if (key != null && val != null) + { + double standardlevelvalue = Convert.ToDouble(val); + standardStoryHeight.Add(key, standardlevelvalue); + key = null; + val = null; + } + else + { + + } + } + } + } + + //reporting + if (i % 2 != 0) + { + if (standardStoryHeight.Count == 0) + { + report.longMsg = "Test cannot be completed. Standard File Level Count returns Zero."; + report.passOrFail = false; + return report; + } + else if (testStoryHeight.Count == 0) + { + report.longMsg = "Test cannot be completed. 
Test File Level Count returns Zero."; + report.passOrFail = false; + return report; + } + else + { + //set pass to true + report.passOrFail = true; + int count = 0; + foreach (KeyValuePair standardPair2 in standardStoryHeight) + { + count++; + double difference; + StoryHeightMin = 10000; + string equivLevel = ""; + if (testStoryHeight.ContainsKey(standardPair2.Key)) + { + double matchkeydiff = Math.Abs(standardPair2.Value - testStoryHeight[standardPair2.Key]); + if (matchkeydiff == 0) + { + report.MessageList.Add("Matched Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key + report.unit + " Exactly"); + report.TestPassedDict.Add(standardPair2.Value.ToString(), true); + continue; + } + else if (matchkeydiff < report.tolerance) + { + report.MessageList.Add("Matched Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key + report.unit + " within allowable tolerance."); + report.TestPassedDict.Add(standardPair2.Value.ToString(), true); + continue; + } + else + { + report.MessageList.Add("Did NOT Match Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key + report.unit + " within allowable tolerance."); + report.TestPassedDict.Add(standardPair2.Value.ToString(), true); + continue; + } + + } + foreach (KeyValuePair testPair in testStoryHeight) + { + //setup standard result and test result + + report.standResult.Add(standardPair2.Key); + report.testResult.Add(testPair.Key); + report.idList.Add(Convert.ToString(count)); + + difference = Math.Abs(Convert.ToDouble(standardPair2.Key)*standardlengthConversion - Convert.ToDouble(testPair.Key) * testlengthConversion); + //store all levels and the difference between them + if (StoryHeightMin > difference) + { + StoryHeightMin = difference; + standLevel = standardPair2.Value.ToString(); + } + } + if (StoryHeightMin < report.tolerance) + { + report.MessageList.Add("Matched Standard File's " + standardPair2.Value + " @ " + standardPair2.Key + report.unit + " within the Tolerance allowed"); + report.TestPassedDict.Add(standLevel, true); + } + else + { + report.MessageList.Add("Standard File's " + standardPair2.Value + " equivalent was not found in the test file. The closest level in the test file was found at " + equivLevel + " in the test file. 
The difference in heights was " + StoryHeightMin.ToString() + report.unit); + report.TestPassedDict.Add(standLevel, false); + } + + } + return report; + } + } + } + + catch (Exception e) + { + report.longMsg = e.ToString(); + report.MessageList.Add(" Failed to locate Building " + report.testType + " in the XML file."); + report.passOrFail = false; + return report; + } + + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + //Created July 2016, Chien Si Harriman + public static bool GetStoryHeights(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr, Conversions.lengthUnitEnum standardLength, Conversions.lengthUnitEnum testLength, double testlengthConversion, double standardlengthConversion, double tolerance) + { + + //small dictionaries I make to keep track of the story level names and heights + //standard file + Dictionary standardStoryHeight = new Dictionary(); + //Test File + Dictionary testStoryHeight = new Dictionary(); + string key = null; + string val = null; + string standLevel = ""; + + + for (int i = 0; i < gbXMLDocs.Count; i++) + { + try + { + //assuming that this will be plenty large for now + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:BuildingStorey", gbXMLns); + int nodecount = nodes.Count; + foreach (XmlNode node in nodes) + { + XmlNodeList childNodes = node.ChildNodes; + foreach (XmlNode childNode in childNodes) + { + if (childNode.Name.ToString() == "Level") { val = childNode.InnerText; } + else if (childNode.Name.ToString() == "Name") { key = childNode.InnerText; } + else { continue; } + if (i % 2 != 0) + { + if (key != null && val != null) + { + double standardlevelvalue = Convert.ToDouble(val) * standardlengthConversion; + standardStoryHeight.Add(key, standardlevelvalue); + key = null; + val = null; + } + else + { + + } + } + else + { + if (key != null && val != null) + { + double testlevelvalue = Convert.ToDouble(val) * testlengthConversion; + testStoryHeight.Add(key, testlevelvalue); + key = null; + val = null; + } + else + { + + } + } + } + } + + //reporting + if (i % 2 != 0) + { + if (standardStoryHeight.Count == 0) + { + logger.Info("TEST FILE FAILURE: Test cannot be completed. Standard File Level Count returns Zero."); + return false; + } + else if (testStoryHeight.Count == 0) + { + logger.Info("TEST FILE FAILURE: Test cannot be completed. 
Test File Level Count returns Zero."); + return false; + } + else + { + //set pass to true + int count = 0; + foreach (KeyValuePair standardPair2 in standardStoryHeight) + { + count++; + double difference; + StoryHeightMin = 10000; + string equivLevel = ""; + if (testStoryHeight.ContainsKey(standardPair2.Key)) + { + double matchkeydiff = Math.Abs(standardPair2.Value - testStoryHeight[standardPair2.Key]); + if (matchkeydiff == 0) + { + logger.Info("TEST FILE SUCCESS: Matched Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key +" Exactly"); + continue; + } + else if (matchkeydiff < tolerance) + { + logger.Info("TEST FILE SUCCESS: Matched Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key + " within allowable tolerance."); + + continue; + } + else + { + logger.Info("TEST FILE FAILURE: Did NOT Match Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key+ " within allowable tolerance."); + return false; + } + + } + foreach (KeyValuePair testPair in testStoryHeight) + { + difference = Math.Abs(Convert.ToDouble(standardPair2.Key) * standardlengthConversion - Convert.ToDouble(testPair.Key) * testlengthConversion); + //store all levels and the difference between them + if (StoryHeightMin > difference) + { + StoryHeightMin = difference; + standLevel = standardPair2.Value.ToString(); + } + } + if (StoryHeightMin < tolerance) + { + logger.Info("TEST FILE SUCCESS: Matched Standard File's " + standardPair2.Value + " @ " + standardPair2.Key + " within the Tolerance allowed"); + } + else + { + logger.Info("Standard File's " + standardPair2.Value + " equivalent was not found in the test file. The closest level in the test file was found at " + equivLevel + " in the test file. The difference in heights was " + StoryHeightMin.ToString()); + } + + } + } + } + } + + catch (Exception e) + { + logger.Info(e.ToString()); + logger.Info(" Failed to complete Building Story Level test in the XML file."); + return false; + } + + } + return true; + } + public static DOEgbXMLReportingObj TestBuildingStoryRHR(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary = "This test analyzes each of the story PolyLoop coordinates in the standard and test files. These PolyLoop "; + report.testSummary += "coordinates define the z-height and orientation of each story plane. This test analyzes the normal vector "; + report.testSummary += "created by the PolyLoop coordinates. The PolyLoop coordinates must be sequenced in a counterclockwise manner "; + report.testSummary += " such that when the right hand rule is applied to this sequence of coordinates, a resultant normal vector "; + report.testSummary += " will point in the +z direction."; + report.testSummary += " If the PolyLoop coordinates do not form vectors that point in the +Z direction"; + report.testSummary += " (when the right hand rule is applied), then this test will fail. 
It is assumed that the vectors that define"; + report.testSummary += " the story plane will be parallel to the X-Y axis.The tolerance is always zero for this test, "; + report.testSummary += "meaning the resulting unit vector will point in the positive Z direction with no margin for error."; + + report.unit = Units; + + //stores the level's z heights + List LevelZs = new List(); + //stores the list of z heights for both files + List> fileLevelZz = new List>(); + //stores the RHR x product and the corresonding z height for a level + Dictionary levelVct = new Dictionary(); + //stores a list of the RHR x product and corresponding z height for both files + List> fileLevelVct = new List>(); + + VectorMath.Vector.CartVect vector = new VectorMath.Vector.CartVect(); + + int errorCount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + + + try + { + //refresh + LevelZs.Clear(); + levelVct.Clear(); + + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + //maybe it would be good if the test result spits out the name of the story? TBD + XmlNodeList PlanarGeometry = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:BuildingStorey/gbXMLv5:PlanarGeometry", gbXMLns); + XmlNodeList PolyLoops = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:BuildingStorey/gbXMLv5:PlanarGeometry/gbXMLv5:PolyLoop", gbXMLns); + int nodecount = PolyLoops.Count; + //get the normals for each level in the Standard File + //get the z-coordinate for each level (we assume that the levels are going to be parallel to Z + LevelZs = GetLevelZs(PlanarGeometry, LevelZs); + foreach (string level in LevelZs) + { + //a simple attempt to filter out exceptions, which could be returned in some instances + if (level.Length < 10) + { + vector = GetPolyLoopXProduct(PlanarGeometry, level); + string levelValue = level; + //if (i == 0) { levelValue += "-T"; } + //else { levelValue += "-S"; } + levelVct.Add(levelValue, vector); + } + } + fileLevelZz.Add(LevelZs); + fileLevelVct.Add(levelVct); + + //reporting + if (i % 2 != 0) + { + Dictionary standDict = fileLevelVct[1]; + Dictionary testDict = fileLevelVct[0]; + foreach (KeyValuePair pair in standDict) + { + + if (testDict.ContainsKey(pair.Key)) + { + report.MessageList.Add("While searching for matching building levels, there has been a Successful match. Building Story Level " + pair.Key + " in the Standard file found a match in the Test File."); + report.passOrFail = true; + //perform cross product again of the two vectors in question. The result should be a zero since they should be parallel + VectorMath.Vector.CartVect rhrTestVector = VectorMath.Vector.CrossProduct(testDict[pair.Key], standDict[pair.Key]); + if (rhrTestVector.X == 0 && rhrTestVector.Y == 0 && rhrTestVector.Z == 0) + { + report.MessageList.Add("For this level match, there is Normal Vector Test Success. 
The right hand rule test identified a parallel normal vector for Level " + pair.Key + " in both the Standard and Test gbXML Files."); + report.passOrFail = true; + } + else + { + VectorMath.Vector.CartVect rhrTestVectorU = VectorMath.Vector.UnitVector(rhrTestVector); + //create a test to determine the angular difference between the two vectors is within tolerance + //|A||B|cos theta = A x B + + //if the angle is within the allowable tolerance, then pass the test with a note that the vectors + //were not parallel + } + + } + else + { + report.MessageList.Add("The right hand rule test for Level " + pair.Key + " in the Standard File could not be completed. A match for this level could not be found in the test file."); + report.passOrFail = false; + errorCount++; + } + } + + } + else { continue; } + + //need to comapre and have if then statement depending on the outcome of the accuracy tests + if (errorCount == 0) + { + report.longMsg = "Test Success: Building Stories RHR in the Test File match the RHR in the Standard File for all Levels."; + } + else + { + report.longMsg = "Not all levels in the Standard File found equivalent levels and normal vectors in the Test File."; + } + return report; + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to complete RHR Test for the Building Storey Nodes. Exception noted."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + return report; + } + + public static bool TestBuildingStoryRHR(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr) + { + + + + //stores the list of z heights for both files + List> fileLevelZz = new List>(); + //stores the RHR x product and the corresonding z height for a level + Dictionary levelVct = new Dictionary(); + + + VectorMath.Vector.CartVect vector = new VectorMath.Vector.CartVect(); + + int errorCount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + try + { + //refresh + //stores the level's z heights + List LevelZs = new List(); + //stores a list of the RHR x product and corresponding z height for both files + List> fileLevelVct = new List>(); + + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + //maybe it would be good if the test result spits out the name of the story? TBD + XmlNodeList PlanarGeometry = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:BuildingStorey/gbXMLv5:PlanarGeometry", gbXMLns); + XmlNodeList PolyLoops = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:BuildingStorey/gbXMLv5:PlanarGeometry/gbXMLv5:PolyLoop", gbXMLns); + int nodecount = PolyLoops.Count; + //get the normals for each level in the Standard File + //get the z-coordinate for each level (we assume that the levels are going to be parallel to Z + LevelZs = GetLevelZs(PlanarGeometry, LevelZs); + if (LevelZs.Count == 0) + { + logger.Info("PROGRAMMER's NOTE: No level polyloops found in this file. 
Level polyloop checks are being ignored."); + return false; + } + foreach (string level in LevelZs) + { + //a simple attempt to filter out exceptions, which could be returned in some instances + if (level.Length < 10) + { + vector = GetPolyLoopXProduct(PlanarGeometry, level); + string levelValue = level; + //if (i == 0) { levelValue += "-T"; } + //else { levelValue += "-S"; } + levelVct.Add(levelValue, vector); + } + } + fileLevelZz.Add(LevelZs); + fileLevelVct.Add(levelVct); + + //reporting + if (i % 2 != 0) + { + if(fileLevelVct[0].Count > 0) + { + Dictionary standDict = fileLevelVct[1]; + Dictionary testDict = fileLevelVct[0]; + foreach (KeyValuePair pair in standDict) + { + + if (testDict.ContainsKey(pair.Key)) + { + logger.Info("PROGRAMMERS NOTE: While searching for matching building levels, there has been a Successful match. Building Story Level " + pair.Key + " in the Standard file found a match in the Test File."); + //perform cross product again of the two vectors in question. The result should be a zero since they should be parallel + VectorMath.Vector.CartVect rhrTestVector = VectorMath.Vector.CrossProduct(testDict[pair.Key], standDict[pair.Key]); + if (Math.Abs(rhrTestVector.X) <= 0.1 && Math.Abs(rhrTestVector.Y) == 0.1 && Math.Abs(rhrTestVector.Z) == 0.1) + { + logger.Info("TEST FILE SUCCESS: For this level match, there is Normal Vector Test Success. The right hand rule test identified a parallel normal vector for Level " + pair.Key + " in both the Standard and Test gbXML Files."); + } + else + { + logger.Info("TEST FILE FAILURE: For this level match, there not Normal Vector Test Success. The right hand rule test shows the vectors are not parallel for Level " + pair.Key + " in both the Standard and Test gbXML Files."); + } + + } + else + { + logger.Fatal("The right hand rule test for Level " + pair.Key + " in the Standard File could not be completed. A match for this level could not be found in the test file."); + return false; + } + } + } + + } + else { continue; } + + //need to comapre and have if then statement depending on the outcome of the accuracy tests + if (errorCount == 0) + { + logger.Info("TEST FILE SUCCESS: Building Stories RHR in the Test File match the RHR in the Standard File for all Levels."); + } + else + { + logger.Info("TEST FILE FAILURE: Not all levels in the Standard File found equivalent levels and normal vectors in the Test File."); + return false; + } + + } + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal(" Failed to complete RHR Test for the Building Storey Nodes. Exception noted."); + return false; + } + } + return true; + } + + //this is a support function used by the GetLevelHeights function above. 
It is not directly, + //iteslf, a test + private static List GetLevelZs(XmlNodeList PlanarGeometry, List LevelZs) + { + string result = ""; + int polyLoopCount = 0; + try + { + int nodecount = PlanarGeometry.Count; + VectorMath.Vector.CartCoord[] vCoords = new VectorMath.Vector.CartCoord[nodecount]; + foreach (XmlNode PolyLoops in PlanarGeometry) + { + //gathers all the cartesian points in a given polyloop + foreach (XmlNode cartesianPoints in PolyLoops) + { + + //test the polyloop RHR convention + //count the total number of cartesian coordinates + int coordcount = cartesianPoints.ChildNodes.Count; + //I may want to test the number of coordinates to make sure it matches + //I do want to ensure I have a minimum number of coords + if (coordcount < minPlanePoints) + { + result += "Insufficient number of cartesian points to define a plane"; + LevelZs.Add(result); + return LevelZs; + } + else + { + int cartPtCount = 0; + //gets a set of XYZ coordinates, one at a time + foreach (XmlNode coordinates in cartesianPoints.ChildNodes) + { + //I will only test one Z-coordinate in each set of coordinates + if (cartPtCount < 1) + { + VectorMath.Vector.CartCoord vC = new VectorMath.Vector.CartCoord(); + vCoords[polyLoopCount] = vC; + } + else { break; } + + int crdCount = 1; + //gets each coordinate one at a time + foreach (XmlNode coordinate in coordinates.ChildNodes) + { + double coord = Convert.ToDouble(coordinate.InnerText); + switch (crdCount) + { + case 1: + vCoords[polyLoopCount].X = coord; + break; + case 2: + vCoords[polyLoopCount].Y = coord; + break; + case 3: + vCoords[polyLoopCount].Z = coord; + break; + default: + break; + } + crdCount++; + } + cartPtCount++; + } + } + + } + polyLoopCount++; + } + //create the List that holds the z-values of each level + for (int z = 0; z < nodecount; z++) + { + LevelZs.Add(vCoords[z].Z.ToString()); + } + + return LevelZs; + } + + catch (Exception e) + { + result += e.ToString(); + LevelZs.Add(result); + return LevelZs; + } + } + //this is a simple way to get the polyLoop X product. + //this is a support function used by the Function TestBuildingStory RHR above + //This X Product routine is the first attempt to produce a X product from coordinates Since the coordinates used to define + //a level plane never create an irregular polygon, this scheme worked. + //it will only assuredly work properly for a triangle, square, or rectangle. Shapes other than these should use subsequent XProduct + //functions as created below. 
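+ //For example, for a counter-clockwise loop in the X-Y plane with corner points (0,0,0), (1,0,0), (1,1,0):
+ //  v1 = (1,0,0) - (0,0,0) = (1,0,0)
+ //  v2 = (1,1,0) - (1,0,0) = (0,1,0)
+ //  v1 x v2 = (0,0,1)
+ //so the resulting unit normal points in +Z and the right hand rule convention is satisfied; listing the
+ //same points in clockwise order flips the normal to (0,0,-1).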
+ //Created by CHarriman, Senior Product Manager Carmel Software + //Nov 2012 + public static VectorMath.Vector.CartVect GetPolyLoopXProduct(XmlNodeList PlanarGeometry, string level) + { + int cartPtCount = 0; + VectorMath.Vector.CartVect xProd = new VectorMath.Vector.CartVect(); + //gathers all the cartesian points in a given polyloop + int nodecount = PlanarGeometry.Count; + VectorMath.Vector.CartCoord[] vCoords = new VectorMath.Vector.CartCoord[3]; + foreach (XmlNode PolyLoops in PlanarGeometry) + { + foreach (XmlNode cartesianPoints in PolyLoops) + { + + //test the polyloop RHR convention + //count the total number of cartesian coordinates + int coordcount = cartesianPoints.ChildNodes.Count; + //I may want to test the number of coordinates to make sure it matches, or if it has a minimum number of coords + if (coordcount < minPlanePoints) + { + //result += "Insufficient number of cartesian points to define a plane"; + return xProd; + } + else + { + cartPtCount = 0; + //gets a set of XYZ coordinates, one at a time + foreach (XmlNode coordinates in cartesianPoints.ChildNodes) + { + if (cartPtCount < 3) + { + VectorMath.Vector.CartCoord vC = new VectorMath.Vector.CartCoord(); + vCoords[cartPtCount] = vC; + } + else { break; } + + int crdCount = 1; + //gets each coordinate one at a time + //filtering through the inner children of the PolyLoop + foreach (XmlNode coordinate in coordinates.ChildNodes) + { + double coord = Convert.ToDouble(coordinate.InnerText); + switch (crdCount) + { + case 1: + vCoords[cartPtCount].X = coord; + break; + case 2: + vCoords[cartPtCount].Y = coord; + break; + case 3: + vCoords[cartPtCount].Z = coord; + break; + default: + break; + } + if (vCoords[cartPtCount].Z.ToString() == level) { break; }; + crdCount++; + } + + cartPtCount++; + } + + } + } + if (vCoords[(cartPtCount - 1)].Z.ToString() == level) { break; } + } + //Get the Cross Product + VectorMath.Vector.CartVect v1 = VectorMath.Vector.CreateVector(vCoords[0], vCoords[1]); + VectorMath.Vector.CartVect v2 = VectorMath.Vector.CreateVector(vCoords[1], vCoords[2]); + xProd = VectorMath.Vector.CrossProduct(v1, v2); + xProd = Vector.UnitVector(xProd); + return xProd; + + } + + //this test was originally invented for the case where the proposed and test cases did not have to be identical + //it was designed simply to ensure that only the TEST file had unique SpaceId values. + //Created by CHarriman Senior Product Manager Carmel Software + //Nov 2012 + public static DOEgbXMLReportingObj UniqueSpaceIdTest(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary = "This test reviews the test file's Space id values, and ensures that they are all unique. If there are any duplicate Space id values, then this test will fail. If there are duplicates, the remainder of the tests in the testbed are not executed and the test will end here until the test file is properly updated. Each Space id must be unique for the test bed to successfully execute. 
If you have failed this test, please review the documents for this test and resubmit the test."; + + report.MessageList = new List(); + report.TestPassedDict = new Dictionary(); + List standardIdList = new List(); + List testIDList = new List(); + report.standResult = new List(); + report.testResult = new List(); + report.idList = new List(); + // report.testType = "UniqueId"; + try + { + for (int i = 0; i < gbXMLDocs.Count; i++) + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space", gbXMLns); + foreach (XmlNode node in nodes) + { + //looks to see if the spaceId is already included in the list of space IDs being generated + string spaceId; + if (i % 2 != 0) + { + for(int n=0; n < node.Attributes.Count; n++) + { + if (node.Attributes[n].Name == "id") + { + spaceId = node.Attributes[n].Value.ToString(); + standardIdList.Add(spaceId); + } + } + + } + else + { + for (int n = 0; n < node.Attributes.Count; n++) + { + if (node.Attributes[n].Name == "id") + { + spaceId = node.Attributes[n].Value.ToString(); + testIDList.Add(spaceId); + + } + } + } + } + } + //now that I have all of the spaceIds, I will loop through and make sure I have perfect matches + //the order of the spaces is not enforced + //create a list that holds the index of the standardIdList when a match is found + //the list should be the same length as standardIdlist and each value should be unique + List indexFound = new List(); + for (int j = 0; j < standardIdList.Count; j++) + { + string standardId = standardIdList[j]; + foreach (string testspaceId in testIDList) + { + if (testspaceId == standardId) + { + indexFound.Add(j); + report.MessageList.Add("The standard file space id: " + standardId + "has found a spaceId match in the test file."); + } + } + } + //search the list to make sure that it is unique and has the proper count + if (indexFound.Count == standardIdList.Count) + { + report.MessageList.Add("The standard file has found a match only once in the test file. All spaceIds have been matched."); + report.passOrFail = true; + report.longMsg = "SpaceId Match test has passed."; + return report; + } + else + { + report.passOrFail = false; + string index = ""; + foreach (int p in indexFound) + { + index += p.ToString() + ", "; + } + report.MessageList.Add(index); + report.longMsg = "SpaceId Match test has failed."; + return report; + } + + //if (standardIdList.Contains(spaceId)) + //{ + // report.testResult.Add("Not Unique"); + + // report.longMsg = "Unique SpaceID Test Failed. " + spaceId + " is already included once in the test file."; + // report.passOrFail = false; + // report.TestPassedDict[spaceId] = false; + // return report; + //} + //else + //{ + // report.testResult.Add("Is Unique"); + + // spaceId = node.Attributes[0].Value.ToString(); + // standardIdList.Add(spaceId); + // report.passOrFail = true; + // report.TestPassedDict.Add(spaceId, true); + // report.MessageList.Add(spaceId + " is unique."); + //} + } + catch (Exception e) + { + report.longMsg = e.ToString(); + report.passOrFail = false; + return report; + } + return report; + } + + public static bool UniqueSpaceIdTest(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr) + { + //report.testSummary = "This test reviews the test file's Space id values, and ensures that they are all unique. If there are any duplicate Space id values, then this test will fail. 
If there are duplicates, the remainder of the tests in the testbed are not executed and the test will end here until the test file is properly updated. Each Space id must be unique for the test bed to successfully execute. If you have failed this test, please review the documents for this test and resubmit the test."; + + //report.MessageList = new List(); + //report.TestPassedDict = new Dictionary(); + List standardIdList = new List(); + List testIDList = new List(); + //report.standResult = new List(); + //report.testResult = new List(); + //report.idList = new List(); + // report.testType = "UniqueId"; + try + { + for (int i = 0; i < gbXMLDocs.Count; i++) + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space", gbXMLns); + foreach (XmlNode node in nodes) + { + //looks to see if the spaceId is already included in the list of space IDs being generated + string spaceId; + if (i % 2 != 0) + { + for (int n = 0; n < node.Attributes.Count; n++) + { + if (node.Attributes[n].Name == "id") + { + spaceId = node.Attributes[n].Value.ToString(); + standardIdList.Add(spaceId); + } + } + + } + else + { + for (int n = 0; n < node.Attributes.Count; n++) + { + if (node.Attributes[n].Name == "id") + { + spaceId = node.Attributes[n].Value.ToString(); + testIDList.Add(spaceId); + + } + } + } + } + } + //now that I have all of the spaceIds, I will loop through and make sure I have perfect matches + //the order of the spaces is not enforced + //create a list that holds the index of the standardIdList when a match is found + //the list should be the same length as standardIdlist and each value should be unique + List indexFound = new List(); + for (int j = 0; j < standardIdList.Count; j++) + { + string standardId = standardIdList[j]; + foreach (string testspaceId in testIDList) + { + if (testspaceId == standardId) + { + indexFound.Add(j); + logger.Info("PROGRAMMER'S NOTE: The standard file space id: " + standardId + "has found a spaceId match in the test file."); + } + } + } + //search the list to make sure that it is unique and has the proper count + if (indexFound.Count == standardIdList.Count) + { + logger.Info("TEST FILE SUCCESS: The standard file has found a match only once in the test file. All spaceIds have been matched."); + var stsum = cr.SpacesSummary.Find(x => x.FileType == "Standard"); + stsum.spaceIDs_unique = true; + var testsum = cr.SpacesSummary.Find(x => x.FileType == "Test"); + testsum.spaceIDs_unique = true; + return true; + } + else + { + logger.Info("TEST FILE FAILURE: The standard file has found a duplicate space ID."); + var stsum = cr.SpacesSummary.Find(x => x.FileType == "Standard"); + stsum.spaceIDs_unique = false; + var testsum = cr.SpacesSummary.Find(x => x.FileType == "Test"); + testsum.spaceIDs_unique = false; + stsum.PassedAllTests = false; + return false; + } + + + } + catch (Exception e) + { + logger.Fatal(e.ToString()); + return false; + } + } + + public static DOEgbXMLReportingObj TestSpaceAreas(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, Conversions.areaUnitEnum standardUnits, Conversions.areaUnitEnum testUnits, double testareaConversion,double standardareaConversion,DOEgbXMLTestDetail testDetails) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. 
The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary = "This test compares the square footage of spaces in the test and standard files. It does this by searching"; + report.testSummary += "for a unique Space id in both the test and standard files, and finding a match. Once a match is found, it then"; + report.testSummary += " finds the square footage reported for the Space area, and compares them to ensure they are the same or"; + report.testSummary += " within tolerance. For example, if the standard file has a Space with id = \"Space-1\" with an area of"; + report.testSummary += "250 square feet, then this test searches through the test file for a Space with the identical id."; + report.testSummary += " Once this space has been located, the test then compares the Area to 250 square feet. "; + report.testSummary += "If they are identical, the test is done, and passes. We have built a tolerance in this test, meaning the"; + report.testSummary += " areas do not need to match perfectly in the standard file and test file. As long as your test file's value"; + report.testSummary += " for Space Area is +/- this tolerance, the test will pass. Using the previous example, if the allowable"; + report.testSummary += " tolerance is 1% (1% of 250 is 2.5 sf), then the test file may have a space area ranging from 247.5 to 252.5"; + report.testSummary += " square feet, and the test will still delcare \"Pass\"."; + + + report.unit = standardUnits.ToString(); + report.passOrFail = true; + string spaceId = ""; + //assuming that this will be plenty large for now + Dictionary standardFileAreaDict = new Dictionary(); + Dictionary testFileAreaDict = new Dictionary(); + bool thinWalled = false; + try + { + //check to see if the test file comes from OpenStudio or Bentley (non-thick wall, or non-centerline geometry) + XmlNamespaceManager gbXMLnstw = gbXMLnsm[0]; + XmlNode productName = gbXMLDocs[0].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:DocumentHistory/gbXMLv5:ProgramInfo/gbXMLv5:ProductName",gbXMLnstw); + if (productName.InnerText.ToLower().Replace(" ", String.Empty).Trim() == "openstudio")//TODO: consider a different test + { + thinWalled = true; + } + for (int i = 0; i < gbXMLDocs.Count; i++) + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList spaceNodes = gbXMLDocs[i].SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space/gbXMLv5:Area", gbXMLnsm[i]); + //make lists of the areas in each project + foreach (XmlNode spaceNode in spaceNodes) + { + string area = spaceNode.InnerText; + if (i % 2 != 0) + { + for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++) + { + if (spaceNode.ParentNode.Attributes[n].Name == "id") + { + spaceId = spaceNode.ParentNode.Attributes[n].Value; + if(!thinWalled) + { + //no conversion necessary + standardFileAreaDict.Add(spaceId, Convert.ToDouble(area)*standardareaConversion); + } + else + { + if(testDetails.ThinWalledSpecs.Count > 0) + { + var thinwalleddef = testDetails.ThinWalledSpecs.Find(x => x.SpaceName == spaceId); //it is critical that space names match for these tests. 
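+ //(Thin-walled case: the standard value recorded for this space comes from the test definition's
+ //ThinWalledSpecs entry (its FloorArea) rather than from the standard file's own Area element; e.g. the
+ //entry whose SpaceName is "Space-1" is looked up by that exact name, which is why the id match noted
+ //above is critical.)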
+ standardFileAreaDict.Add(spaceId, thinwalleddef.FloorArea); + } + else + { + //no conversion necessary + standardFileAreaDict.Add(spaceId, Convert.ToDouble(area) * standardareaConversion); + } + } + break; + } + } + + } + else + { + for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++) + { + if (spaceNode.ParentNode.Attributes[n].Name == "id") + { + spaceId = spaceNode.ParentNode.Attributes[n].Value; + + double convertedArea = Convert.ToDouble(area) * testareaConversion; + testFileAreaDict.Add(spaceId, convertedArea); + break; + } + } + + } + } + } + var standardKeys = standardFileAreaDict.Keys; + + foreach (string key in standardKeys) + { + if (testFileAreaDict.ContainsKey(key)) + { + double testFileSpaceArea = testFileAreaDict[key]; + double standardFileSpaceArea = standardFileAreaDict[key]; + + + report.standResult.Add(standardFileSpaceArea.ToString("#.000")); + report.testResult.Add(testFileSpaceArea.ToString("#.000")); + report.idList.Add(key); + + double difference = Math.Abs(testFileSpaceArea - standardFileSpaceArea); + if (difference == 0) + { + report.MessageList.Add("For Space Id: " + key + ". Success finding matching space area. The Standard File and the Test File both have a space with an area = " + testFileSpaceArea.ToString("#.000") + " " + report.unit + ". "); + report.TestPassedDict.Add(key, true); + } + else if (difference < report.tolerance) + { + report.MessageList.Add("For Space Id: " + key + ". Success finding matching space area. The Standard File space area of " + standardFileSpaceArea.ToString("#.000") + " and the Test File space area of " + testFileSpaceArea.ToString("#.000") + " " + report.unit + " is within the allowable tolerance of " + report.tolerance.ToString() + " " + report.unit); + report.TestPassedDict.Add(key, true); + } + else + { + report.MessageList.Add("For space Id: " + key + ". Failure to find an space area match. THe area equal to = " + standardFileSpaceArea.ToString("#.000") + " " + report.unit + " in the Standard File could not be found in the Test File. "); + report.TestPassedDict.Add(key, false); + } + } + else + { + report.standResult.Add("---"); + report.testResult.Add("Could not be matched"); + report.idList.Add(key); + //failure to match spaceIds + report.MessageList.Add("Test File and Standard File space names could not be matched. SpaceId: " + key + " could not be found in the test file."); + report.passOrFail = false; + return report; + } + } + return report; + } + + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = "Failed to complete the " + report.testType + ". 
See exceptions noted."; + report.passOrFail = false; + return report; + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + //created July 2016 by Chien Si Harriman, Independent Contractor + public static bool TestSpaceAreas(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr, Conversions.areaUnitEnum standardUnits, Conversions.areaUnitEnum testUnits, double testareaConversion, double standardareaConversion, DOEgbXMLTestDetail testDetails, double tolerance) + { + + + string spaceId = ""; + Dictionary standardFileAreaDict = new Dictionary(); + Dictionary testFileAreaDict = new Dictionary(); + bool thinWalled = false; + try + { + //check to see if the test file comes from OpenStudio or Bentley (non-thick wall, or non-centerline geometry) + XmlNamespaceManager gbXMLnstw = gbXMLnsm[0]; + XmlNode productName = gbXMLDocs[0].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:DocumentHistory/gbXMLv5:ProgramInfo/gbXMLv5:ProductName", gbXMLnstw); + if (productName.InnerText.ToLower().Replace(" ", String.Empty).Trim() == "openstudio")//TODO: consider a different test + { + thinWalled = true; + } + for (int i = 0; i < gbXMLDocs.Count; i++) + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList spaceNodes = gbXMLDocs[i].SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space/gbXMLv5:Area", gbXMLnsm[i]); + //make lists of the areas in each project + foreach (XmlNode spaceNode in spaceNodes) + { + string area = spaceNode.InnerText; + if (i % 2 != 0) + { + for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++) + { + if (spaceNode.ParentNode.Attributes[n].Name == "id") + { + spaceId = spaceNode.ParentNode.Attributes[n].Value; + if (!thinWalled) + { + //no conversion necessary + standardFileAreaDict.Add(spaceId, Convert.ToDouble(area) * standardareaConversion); + } + else + { + if(testDetails.ThinWalledSpecs.Count > 0) + { + var thinwalleddef = testDetails.ThinWalledSpecs.Find(x => x.SpaceName == spaceId); //it is critical that space names match for these tests. 
+ standardFileAreaDict.Add(spaceId, thinwalleddef.FloorArea); + } + else + { + //no conversion necessary + standardFileAreaDict.Add(spaceId, Convert.ToDouble(area) * standardareaConversion); + } + } + break; + } + } + + } + else + { + for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++) + { + if (spaceNode.ParentNode.Attributes[n].Name == "id") + { + spaceId = spaceNode.ParentNode.Attributes[n].Value; + + double convertedArea = Convert.ToDouble(area) * testareaConversion; + testFileAreaDict.Add(spaceId, convertedArea); + break; + } + } + + } + } + } + var standardKeys = standardFileAreaDict.Keys; + + foreach (string key in standardKeys) + { + logger.Info("SPACE ID: " + key); + //important, we don't make a new report unless one has already been created + DetailedSpaceSummary ds = new DetailedSpaceSummary(); + if (cr.SpacesReport.Count() != 0) { + var res = cr.SpacesReport.Find(x => x.ID == key); + if(res == null) + { + ds.ID = key; + } + else + { + ds = cr.SpacesReport.Find(x => x.ID == key); + } + } + else + { + ds.ID = key; + } + ds.AreaUnits = "SquareFeet"; + if (testFileAreaDict.ContainsKey(key)) + { + double testFileSpaceArea = testFileAreaDict[key]; + double standardFileSpaceArea = standardFileAreaDict[key]; + ds.TotalSurfaceArea = standardFileSpaceArea; + ds.TotalTestSurfaceArea = testFileSpaceArea; + + double difference = Math.Abs(testFileSpaceArea - standardFileSpaceArea); + if (difference == 0) + { + logger.Info("TEST FILE SUCCESS:PERFECT : Found matching space area with an area = " + testFileSpaceArea.ToString("#.000") +" "+standardUnits+". "); + ds.FoundMatch = true; + } + else if (difference < tolerance) + { + logger.Info("TEST FILE SUCCESS: Found matching space area with an area = " + testFileSpaceArea.ToString("#.000") + " " + standardUnits + "."); + ds.FoundMatch = true; + } + else + { + logger.Info("TEST FILE FAILURE: " + key + ". Failure to find an space area match."); + ds.FoundMatch = false; + } + } + else + { + logger.Info("TEST FILE FAILURE: "+key +" Could not be matched"); + //failure to match spaceIds + logger.Info("Test File and Standard File space names could not be matched. SpaceId: " + key + " could not be found in the test file."); + ds.FoundMatch = false; + return false; + } + cr.SpacesReport.Add(ds); + } + } + + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal("Failed to complete the Space Area Test. See exceptions noted."); + return false; + } + return true; + } + + //Tolerance checks depend upon percentage tolerances + public static DOEgbXMLReportingObj TestSpaceVolumes(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, Conversions.volumeUnitEnum standardUnits, Conversions.volumeUnitEnum testUnits, double testvolConversion, double standardvolConversion,DOEgbXMLTestDetail testDetails) + { + report.passOrFail = true; + string spaceId = ""; + report.unit = standardUnits.ToString(); + //assuming that this will be plenty large for now + Dictionary standardFileVolumeDict = new Dictionary(); + Dictionary testFileVolumeDict = new Dictionary(); + bool thinWalled = false; + try + { + //check to see if the test file comes from OpenStudio or Bentley (non-thick wall, or non-centerline geometry) + XmlNamespaceManager gbXMLnstw = gbXMLnsm[0]; + XmlNode productName = gbXMLDocs[0].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:DocumentHistory/gbXMLv5:ProgramInfo/gbXMLv5:ProductName", gbXMLnstw); + if (productName.InnerText.ToLower().Replace(" ", String.Empty).Trim() == "openstudio") //TODO: consider a different test. 
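+ //(For example, ProductName values such as "OpenStudio", "Open Studio" or "openstudio " all normalize to
+ //"openstudio" here, and any of them marks the file pair as thin-walled, non-centerline geometry.)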
+ { + thinWalled = true; + } + for (int i = 0; i < gbXMLDocs.Count; i++) + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList spaceNodes = gbXMLDocs[i].SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space/gbXMLv5:Volume", gbXMLnsm[i]); + //make lists of the areas in each project + foreach (XmlNode spaceNode in spaceNodes) + { + string volume = spaceNode.InnerText; + if (i % 2 != 0) + { + spaceId = spaceNode.ParentNode.Attributes[0].Value; + if(!thinWalled) + { + //no conversion necessary + standardFileVolumeDict.Add(spaceId, (Convert.ToDouble(volume) * standardvolConversion)); + } + else + { + if(testDetails.ThinWalledSpecs.Count > 0) + { + var twSpec = testDetails.ThinWalledSpecs.Find(x => x.SpaceName == spaceId); + standardFileVolumeDict.Add(spaceId, twSpec.Volume); + } + else + { + //no conversion necessary + standardFileVolumeDict.Add(spaceId, (Convert.ToDouble(volume) * standardvolConversion)); + } + } + } + else + { + spaceId = spaceNode.ParentNode.Attributes[0].Value; + double convertedValue = Convert.ToDouble(volume) * testvolConversion; + testFileVolumeDict.Add(spaceId, convertedValue); + } + } + } + var standardKeys = standardFileVolumeDict.Keys; + foreach (string key in standardKeys) + { + if (testFileVolumeDict.ContainsKey(key)) + { + double standardFileVolume = standardFileVolumeDict[key]; + double testFileVolume = testFileVolumeDict[key]; + + report.standResult.Add(standardFileVolume.ToString("#.000")); + report.testResult.Add(testFileVolume.ToString("#.000")); + report.idList.Add(key); + + double pctdifference = Math.Abs(testFileVolume - standardFileVolume)/standardFileVolume; + if (pctdifference == 0) + { + report.MessageList.Add("For Space Id: " + key + ". Success finding matching space volume. The Standard and Test Files both have identical volumes: " + testFileVolume.ToString("#.000") + " " + report.unit + "for Space Id: " + key); + report.TestPassedDict.Add(key, true); + } + else if (pctdifference <= report.tolerance) + { + report.MessageList.Add("For Space Id: " + key + ". Success finding matching space volume. The Standard Files space volume of " + standardFileVolume.ToString("#.000") + " " + report.unit + "and the Test File space volume: " + testFileVolume.ToString("#.000") + " are within the allowed tolerance of" + report.tolerance.ToString() + " " + report.unit + "."); + report.TestPassedDict.Add(key, true); + } + else + { + //at the point of failure, the test will return with details about which volume failed. + report.MessageList.Add("For Space Id: " + key + ". Failure to find a volume match. The Volume in the Test File equal to: " + testFileVolume.ToString("#.000") + " " + report.unit + " was not within the allowed tolerance. SpaceId: " + key + " in the Standard file has a volume: " + standardFileVolume.ToString("#.000") + " ."); + report.TestPassedDict.Add(key, false); + } + } + else + { + report.standResult.Add("Space Id: " + key); + report.testResult.Add("Could not be matched"); + report.idList.Add(""); + + //at the point of failure, the test will return with details about which volume failed. + report.MessageList.Add("Test File and Standard File space names could not be matched. SpaceId: " + key + " could not be found in the test file."); + report.passOrFail = false; + return report; + } + } + return report; + } + + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to complete the " + report.testType + ". 
See exceptions noted."; + report.passOrFail = false; + return report; + } + + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + //Created July 2016, refactored by Chien Si Harriman. The tolerance checks are based on percentage tolerances, not absolute. + public static bool TestSpaceVolumes(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr, Conversions.volumeUnitEnum standardUnits, Conversions.volumeUnitEnum testUnits, double testvolConversion, double standardvolConversion, DOEgbXMLTestDetail testDetails, double tolerance) + { + + string spaceId = String.Empty; + Dictionary standardFileVolumeDict = new Dictionary(); + Dictionary testFileVolumeDict = new Dictionary(); + bool thinWalled = false; + try + { + //check to see if the test file comes from OpenStudio or Bentley (non-thick wall, or non-centerline geometry) + XmlNamespaceManager gbXMLnstw = gbXMLnsm[0]; + XmlNode productName = gbXMLDocs[0].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:DocumentHistory/gbXMLv5:ProgramInfo/gbXMLv5:ProductName", gbXMLnstw); + if (productName.InnerText.ToLower().Replace(" ", String.Empty).Trim() == "openstudio") //TODO: consider a different test + { + thinWalled = true; + } + for (int i = 0; i < gbXMLDocs.Count; i++) + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList spaceNodes = gbXMLDocs[i].SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space/gbXMLv5:Volume", gbXMLnsm[i]); + //make lists of the areas in each project + foreach (XmlNode spaceNode in spaceNodes) + { + string volume = spaceNode.InnerText; + if (i % 2 != 0) + { + for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++) + { + if (spaceNode.ParentNode.Attributes[n].Name == "id") + { + spaceId = spaceNode.ParentNode.Attributes[n].Value; + if (!thinWalled) + { + //no conversion necessary + standardFileVolumeDict.Add(spaceId, (Convert.ToDouble(volume) * standardvolConversion)); + } + else + { + if(testDetails.ThinWalledSpecs.Count > 0) + { + var twSpec = testDetails.ThinWalledSpecs.Find(x => x.SpaceName == spaceId); + standardFileVolumeDict.Add(spaceId, twSpec.Volume); + break; + } + else + { + //no conversion necessary + standardFileVolumeDict.Add(spaceId, (Convert.ToDouble(volume) * standardvolConversion)); + } + } + } + } + } + else + { + for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++) + { + if (spaceNode.ParentNode.Attributes[n].Name == "id") + { + spaceId = spaceNode.ParentNode.Attributes[n].Value; + double convertedValue = Convert.ToDouble(volume) * testvolConversion; + testFileVolumeDict.Add(spaceId, convertedValue); + break; + } + } + } + } + } + var standardKeys = standardFileVolumeDict.Keys; + foreach (string key in standardKeys) + { + logger.Info("SPACE ID:" + key); + //important, we don't make a new report unless one has already been created + DetailedSpaceSummary ds = new DetailedSpaceSummary(); + if (cr.SpacesReport.Count() != 0) { + var result = cr.SpacesReport.Find(x => x.ID == key); + if (result == null) + { + ds.ID = key; + } + else + { + ds = cr.SpacesReport.Find(x => x.ID == key); + } + } + else + { + ds.ID = key; + } + ds.VolumeUnits = "Cubic Feet"; + if (testFileVolumeDict.ContainsKey(key)) + { + double standardFileVolume = standardFileVolumeDict[key]; + double testFileVolume = testFileVolumeDict[key]; + ds.TotalVolume = standardFileVolume; + ds.TotalTestVolume = testFileVolume; + + + double pctdifference = Math.Abs(testFileVolume - 
standardFileVolume)/standardFileVolume; + if (pctdifference == 0) + { + logger.Info("TEST FILE SUCCESS:PERFECT : " + key + ". Success finding matching space volume."); + ds.FoundMatch = true; + } + else if (pctdifference <= tolerance) + { + logger.Info("TEST FILE SUCCESS: " + key + ". Success finding matching space volume."); + ds.FoundMatch = true; + } + else + { + //at the point of failure, the test will return with details about which volume failed. + logger.Info("TEST FILE FAILURE: " + key + ". Failure to find a volume match."); + ds.FoundMatch = false; + return false; + } + } + else + { + logger.Info("TEST FILE FAILURE: " + key + ". Failure to find a volume match."); + //at the point of failure, the test will return with details about which volume failed. + logger.Info("PROGRAMMER's NOTE: Test File and Standard File space names could not be matched. SpaceId: " + key + " could not be found in the test file."); + ds.FoundMatch = false; + } + } + var failures = cr.SpacesReport.FindAll(x => x.FoundMatch == false); + return (failures.Count > 0) ? false : true; + } + + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal(" Failed to complete the Spaces Volume Test. See exceptions noted."); + return false; + } + logger.Fatal("Fatal Spaces Volume Test Failure"); + return false; + } + + //this function was abandoned until the second phase + //Created Dec 2012 by CHarriman Senior Product Manager Carmel Software Corp + + public static DOEgbXMLReportingObj TestShellGeomPLRHR(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + string result = ""; + string floorarea = ""; + report.unit = Units; + List VectList = new List(); + + //keeps a dictionary of the shell geometry points for each space of the test file key = spaceId, value = List of Coordinates + Dictionary> shellGeomPtsTF = new Dictionary>(); + //keeps a dictionary of the shell geometry points for each space of the standard file key = spaceId, value = List of Coordinates + Dictionary> shellGeomPtsSF = new Dictionary>(); + //keeps a dictinary of the RHR vectors of the Test file + Dictionary> shellGeomRHRTF = new Dictionary>(); + //keeps a dictionary of the RHR vectors of the Standard file + Dictionary> shellGeomRHRSF = new Dictionary>(); + + for (int i = 0; i < gbXMLDocs.Count; i++) + { + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + //for each space, gather some kind of qualifying information + XmlNodeList spaceNodes = gbXMLDocs[i].SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space", gbXMLns); + foreach (XmlNode space in spaceNodes) + { + XmlNodeList spaceChildren = space.ChildNodes; + foreach (XmlNode spaceChild in spaceChildren) + { + if (spaceChild.Name == "ShellGeometry") + { + XmlNode closedShell = spaceChild.FirstChild; + switch (i) + { + case 0: + shellGeomPtsTF = GetShellGeomPts(closedShell); + break; + case 1: + shellGeomPtsSF = GetShellGeomPts(closedShell); + break; + default: + break; + } + //Determine if matches found everywhere + + //Test the two sets of data points to find matches return the coordinate matches in the same order they + //are presented in the gbXML file + + switch (i) + { + case 0: + shellGeomRHRTF = GetShellGeomPolyRHR(shellGeomPtsTF); + break; + case 1: + shellGeomRHRSF = GetShellGeomPolyRHR(shellGeomPtsSF); + break; + default: + break; + + } + + } + + } + } + + + } + catch (Exception e) + { + + } + return report; + } + return report; + } + //this is a support tool for TestShellGeomPLRHR + 
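+ //The nested loops below assume the usual gbXML closed-shell layout, sketched here for reference
+ //(attributes omitted):
+ //  <ShellGeometry>
+ //    <ClosedShell>
+ //      <PolyLoop>
+ //        <CartesianPoint>
+ //          <Coordinate>0</Coordinate><Coordinate>0</Coordinate><Coordinate>0</Coordinate>
+ //        </CartesianPoint>
+ //        ... more CartesianPoint elements ...
+ //      </PolyLoop>
+ //      ... more PolyLoop elements ...
+ //    </ClosedShell>
+ //  </ShellGeometry>
+ //Each PolyLoop becomes one "spaceId/loopNumber" entry in the dictionary that is returned.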
public static Dictionary> GetShellGeomPts(XmlNode closedShell) + { + Dictionary> PtsDict = new Dictionary>(); + + string spaceId = "none"; + + int cartPtCount; + try + { + //get the name of the space for which this point is defined + XmlNode spaceNode = closedShell.ParentNode; + XmlAttributeCollection spaceAtts = spaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "id") + { + spaceId = at.Value; + break; + } + + } + //keep track of the number of polyloops in the closed shell + int pLCount = 1; + //store the geometry points + foreach (XmlNode PolyLoops in closedShell) + { + List vCoords = new List(); + List PtsList = new List(); + cartPtCount = 0; + foreach (XmlNode cartesianPoints in PolyLoops) + { + //reset surface area and unitRHR (this is how I know that there may be a problem + //and these values are returned as points. It is not perfect + Vector.CartCoord Pts = new Vector.CartCoord(); + Pts.X = -999; + Pts.Y = -999; + Pts.Z = -999; + PtsList.Add(Pts); + int crdCount = 1; + //gets a set of XYZ coordinates, one at a time + foreach (XmlNode coordinate in cartesianPoints.ChildNodes) + { + double coord = Convert.ToDouble(coordinate.InnerText); + switch (crdCount) + { + case 1: + + PtsList[cartPtCount].X = coord; + break; + case 2: + + PtsList[cartPtCount].Y = coord; + break; + case 3: + + PtsList[cartPtCount].Z = coord; + break; + default: + break; + + } + crdCount++; + } + cartPtCount++; + } + string spaceSurface = spaceId + "/" + pLCount.ToString(); + //create memory safe points list + List mptsList = new List(); + foreach(Vector.CartCoord cd in PtsList) + { + mptsList.Add(new Vector.MemorySafe_CartCoord(cd.X, cd.Y, cd.Z)); + } + PtsDict.Add(spaceSurface, mptsList); + pLCount++; + //PtsList.Clear(); + } + } + catch (Exception e) + { + + } + //I may want to test the number of coordinates to make sure it matches, or if it has a minimum number of coords + return PtsDict; + } + + // + //Written Jan 31, 2013 by Chien Si Harriman, Senior Product Manager, Carmel Software Corporation + //Designed to take a Dictionary + //Each set of points are then turned into vectors, which are then put through a cross product to determine the + //normal vector. we only arbitrarily take the first three points in the list, which potentially could cause some issue. + //This is planned to be fixed in a future release. + //The normal vector calculated is the value in the key value pair, the key being the spaceId+surfaceNumber, + //The Dictionary is returned with it includes a key value pair for each surface it has analyzed. + //Therefore, if the Dictionary sent to it has 12 key value pairs, then it will return 12 key value pairs as well. + //This is not checked for explicitly but mentioned for clarity. 
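+ //Worked example of the normal-vector calculation described above (illustrative numbers only): if a polyloop's
+ //first three points are (0,0,0), (1,0,0) and (1,1,0), then v1 = (1,0,0), v2 = (0,1,0), and the cross product
+ //v1 x v2 = (0,0,1), so the unit right-hand-rule normal points in +Z (the loop is wound counter-clockwise when
+ //viewed from above). Listing the same square clockwise, e.g. (0,0,0), (0,1,0), (1,1,0), gives (0,0,-1) instead.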
+ public static Dictionary> GetShellGeomPolyRHR(Dictionary> PtsList) + { + //reg expressions + string iDPatt = "(.+)[^/][0-9]"; + string numPatt = "[0-9]+"; + + //initialize variables needed in this method + VectorMath.Vector.CartVect unitRHR = new VectorMath.Vector.CartVect(); + List vCoords = new List(); + List vVect = new List(); + string spaceId = "none"; + string spacenum = ""; + //dictionary that will be returned by this method + Dictionary> plRHRDict = new Dictionary>(); + + //begin iterating through the Cartesian Points passed into the method (PtsList) + for (int i = 0; i < PtsList.Count; i++) + { + //get the identification strings associated with each list of points in the dictionary passed to the method + string spaceSurf = PtsList.Keys.ElementAt(i); + foreach (Match match in Regex.Matches(spaceSurf, iDPatt)) + { + spaceId = match.ToString(); + } + string spaceSurf2 = PtsList.Keys.ElementAt(i); + foreach (Match match in Regex.Matches(spaceSurf2, numPatt)) + { + spacenum = match.ToString(); + } + + //take the list of coordinates and store them locally + //this step does not need to be taken, but it does simplify the coding a little bit. + foreach (Vector.MemorySafe_CartCoord coord in PtsList.Values.ElementAt(i)) + { + vCoords.Add(coord); + } + //just arbitrarily take the first 3 coordinates + //this can lead to bad results, but is used until the next release of the software + VectorMath.Vector.CartVect v1 = VectorMath.Vector.CreateVector(vCoords[0], vCoords[1]); + VectorMath.Vector.CartVect v2 = VectorMath.Vector.CreateVector(vCoords[1], vCoords[2]); + unitRHR = VectorMath.Vector.CrossProduct(v1, v2); + unitRHR = Vector.UnitVector(unitRHR); + vVect.Add(unitRHR); + vCoords.Clear(); + + } + plRHRDict.Add(spaceId, vVect); + return plRHRDict; + } + + // + //Written Jan 31, 2013 by Chien Si Harriman, Senior Product Manager, Carmel Software Corporation + //A simple method that reports the number of all surface elements in a test file and standard file + //If the number of surface elements is not the same, the method returns false and displays the difference in the number of surfaces. + public static DOEgbXMLReportingObj GetSurfaceCount(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Mar 14 2013 + report.testSummary = "This test compares the total number of Surface elements in the test and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears in both files. If the "; + report.testSummary += "quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. In other words, the surface counts are the same, or the test fails."; + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary += " You may notice that this test has failed, but overall your file has passed. This is because the surface"; + report.testSummary += " count may not be a perfect indicator of accuracy. 
So overall, the test may pass even though this test failed."; + report.testSummary += " Refer to the pass/fail summary sheet for more information."; + + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + int nodecount = nodes.Count; + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference <= report.tolerance) + { + report.longMsg = "The " + report.testType + " matches standard file, the difference was within tolerance = " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The " + report.testType + " does not match standard file, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference; + report.passOrFail = false; + return report; + } + } + else { continue; } + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + // + //Written Jan 31, 2013 by Chien Si Harriman, Senior Product Manager, Carmel Software Corporation + //A simple method that reports the number of surface elements whose surfaceType attribute = "ExteriorWall in a test file and standard file + //If the number of surface elements is not the same, the method returns false and displays the difference in the number of surfaces. + public static DOEgbXMLReportingObj GetEWSurfaceCount(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Mar 14 2013 + report.testSummary = "This test compares the total number of Surface elements with the SurfaceType=\"ExteriorWall\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this SurfaceType in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. In other words, the surface counts are the same, or the test fails."; + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary += " You may notice that this test has failed, but overall your file has passed. This is because the surface"; + report.testSummary += " count may not be a perfect indicator of accuracy. 
So overall, the test summary may show \"Pass\" even though this test failed."; + report.testSummary += " Refer to the pass/fail summary sheet for more information."; + + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "ExteriorWall") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + report.longMsg = "The Test File's" + report.testType + " matches the Standard File exactly, the difference is zero."; + report.passOrFail = true; + return report; + } + else if (difference <= report.tolerance) + { + report.longMsg = "The Test File's " + report.testType + " matches Standard File within the allowable tolerance, the difference between the two files is " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The Test File's " + report.testType + " does not match Standard File, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". 
" + resultsArray[i] + " exterior wall surfaces in the Standard File and " + resultsArray[i - 1] + " exterior wall surfaces in the Test File."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + //Written July 2016 by Chien Si Harriman + public static bool GetEWSurfaceCount(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr) + { + + //assuming that this will be plenty large for now + string[] resultsArray = new string[500]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "ExteriorWall") + { + nodecount++; + } + break; + } + } + } + if (i % 2 == 0) + { + cr.SurfacesSummary.Find(x => x.FileType == "Test").NumberOfExternalWalls = nodecount; + } + else + { + cr.SurfacesSummary.Find(x => x.FileType == "Standard").NumberOfExternalWalls = nodecount; + } + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Exterior Wall count matches the Standard File exactly, the difference is zero."); + } + else + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Exterior Wall does not match the Standard File exactly."); + } + + } + else { continue; } + + } + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal(" Failed to complete the Exterior Wall Count Procedure."); + return false; + } + } + return true; + } + // + //Written Jan 31, 2013 by Chien Si Harriman, Senior Product Manager, Carmel Software Corporation + //A simple method that reports the number of surface elements whose surfaceType attribute = "InterirWall" in a test file and standard file + //If the number of surface elements is not the same, the method returns false and displays the difference in the number of surfaces. + public static DOEgbXMLReportingObj GetIWSurfaceCount(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Mar 14 2013 + report.testSummary = "This test compares the total number of Surface elements with the SurfaceType=\"InteriorWall\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this SurfaceType in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. 
"; + report.testSummary += "The tolerance is zero for this test. In other words, the surface counts are the same, or the test fails."; + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary += " You may notice that this test has failed, but overall your file has passed. This is because the surface"; + report.testSummary += " count may not be a perfect indicator of accuracy. So overall, the test summary may show \"Pass\" even though this test failed."; + report.testSummary += " Refer to the pass/fail summary sheet for more information."; + + report.unit = Units; + + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "InteriorWall") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + report.longMsg = "The Test File's" + report.testType + " matches the Standard File exactly, the difference is zero."; + report.passOrFail = true; + return report; + } + else if (difference <= report.tolerance) + { + report.longMsg = "The Test File's " + report.testType + " matches Standard File within the allowable tolerance, the difference between the two files is " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The Test File's" + report.testType + " does not match Standard File, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". 
" + resultsArray[i] + " interior wall surfaces in the standard file and " + resultsArray[i - 1] + " interior wall surfaces in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + public static bool GetIWSurfaceCount(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr) + { + + + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "InteriorWall") + { + nodecount++; + } + break; + } + } + } + + if (i % 2 == 0) //this should never fail, but it will if orders are reversed. Have to be careful. + { + cr.SurfacesSummary.Find(x => x.FileType == "Test").NumberOfInternalWalls = nodecount; + } + else + { + cr.SurfacesSummary.Find(x => x.FileType == "Standard").NumberOfInternalWalls = nodecount; + } + if (i % 2 != 0) + { + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Interior Wall count matches the Standard File exactly, the difference is zero."); + } + else + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Interior Wall count does not match the Standard File exactly."); + } + + } + else { continue; } + + } + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal(" Failed to complete Interior Wall Count"); + return false; + } + } + return true; + } + + // + //Written Jan 31, 2013 by Chien Si Harriman, Senior Product Manager, Carmel Software Corporation + //A simple method that reports the number of surface elements whose surfaceType attribute = "InteriorFloor" in a test file and standard file + //If the number of surface elements is not the same, the method returns false and displays the difference in the number of surfaces. + public static DOEgbXMLReportingObj GetIFSurfaceCount(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Mar 14 2013 + report.testSummary = "This test compares the total number of Surface elements with the SurfaceType=\"InteriorFloor\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this Surface Type in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. 
In other words, the surface counts are the same, or the test fails."; + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary += " You may notice that this test has failed, but overall your file has passed. This is because the surface"; + report.testSummary += " count may not be a perfect indicator of accuracy. So overall, the test summary may show \"Pass\" even though this test failed."; + report.testSummary += " Refer to the pass/fail summary sheet for more information."; + + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "InteriorFloor") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + report.longMsg = "The Test File's" + report.testType + " matches the Standard File exactly, the difference is zero."; + report.passOrFail = true; + return report; + } + else if (difference <= report.tolerance) + { + report.longMsg = "The Test File's " + report.testType + " matches Standard File within the allowable tolerance, the difference between the two files is " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The Test File's" + report.testType + " does not match Standard File, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". 
" + resultsArray[i] + " interior floor surfaces in the standard file and " + resultsArray[i - 1] + " interior floor surfaces in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + //created July 2016 by Chien Si Harriman + public static bool GetIFSurfaceCount(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr) + { + + + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "InteriorFloor" || type == "Ceiling") + { + nodecount++; + } + break; + } + } + } + + if (i % 2 == 0) //this should never fail, but it will if orders are reversed. Have to be careful. + { + cr.SurfacesSummary.Find(x => x.FileType == "Test").NumberOfInternalFloors_Ceilings = nodecount; + } + else + { + cr.SurfacesSummary.Find(x => x.FileType == "Standard").NumberOfInternalFloors_Ceilings = nodecount; + } + if (i % 2 != 0) + { + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Interior Floor/Ceiling count matches the Standard File exactly, the difference is zero."); + } + else + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Interior Floor/Ceiling count does not match the Standard File exactly."); + } + + } + else { continue; } + + } + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal(" Failed to complete Interior Floor/Ceiling Count"); + return false; + } + } + return true; + } + // + //Written Jan 31, 2013 by Chien Si Harriman, Senior Product Manager, Carmel Software Corporation + //A simple method that reports the number of surface elements whose surfaceType attribute = "Roof" in a test file and standard file + //If the number of roof surface elements is not the same, the method returns false and displays the difference in the number of surfaces. + public static DOEgbXMLReportingObj GetRoofSurfaceCount(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Mar 14 2013 + report.testSummary = "This test compares the total number of Surface elements with the SurfaceType=\"Roof\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this Surface Type in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. 
In other words, the surface counts are the same, or the test fails."; + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary += " You may notice that this test has failed, but overall your file has passed. This is because the surface"; + report.testSummary += " count may not be a perfect indicator of accuracy. So overall, the test summary may show \"Pass\" even though this test failed."; + report.testSummary += " Refer to the pass/fail summary sheet for more information."; + + report.unit = Units; + + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "Roof") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + report.longMsg = "The Test File's" + report.testType + " matches the Standard File exactly, the difference is zero."; + report.passOrFail = true; + return report; + } + else if (difference <= report.tolerance) + { + report.longMsg = "The Test File's " + report.testType + " matches Standard File within the allowable tolerance, the difference between the two files is " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The Test File's" + report.testType + " does not match Standard File, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". 
" + resultsArray[i] + " roof surfaces in the standard file and " + resultsArray[i - 1] + " roof surfaces in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + //Created July 2016 + public static bool GetRoofSurfaceCount(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr) + { + + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "Roof") + { + nodecount++; + } + break; + } + } + } + + if (i % 2 == 0) //this should never fail, but it will if orders are reversed. Have to be careful. + { + cr.SurfacesSummary.Find(x => x.FileType == "Test").NumberOfRoofs = nodecount; + } + else + { + cr.SurfacesSummary.Find(x => x.FileType == "Standard").NumberOfRoofs = nodecount; + } + if (i % 2 != 0) + { + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Roof count matches the Standard File exactly, the difference is zero."); + } + else + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Roof count does not match the Standard File exactly."); + } + + } + else { continue; } + + } + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal(" Failed to complete Roof Count"); + return false; + } + } + return true; + } + // + //Written Jan 31, 2013 by Chien Si Harriman, Senior Product Manager, Carmel Software Corporation + //A simple method that reports the number of surface elements whose surfaceType attribute = "Shade" in a test file and standard file + //If the number of surface elements is not the same, the method returns false and displays the difference in the number of surfaces. + public static DOEgbXMLReportingObj GetShadeSurfaceCount(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Mar 14 2013 + report.testSummary = "This test compares the total number of Surface elements with the SurfaceType=\"Shade\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this Surface Type in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. 
In other words, the surface counts are the same, or the test fails.";
+ //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML
+ //added Feb 13 2013
+ report.testSummary += " You may notice that this test has failed, but overall your file has passed. This is because the surface";
+ report.testSummary += " count may not be a perfect indicator of accuracy. So overall, the test summary may show \"Pass\" even though this test failed.";
+ report.testSummary += " Refer to the pass/fail summary sheet for more information.";
+ 
+ report.unit = Units;
+ 
+ //assuming that this will be plenty large for now
+ string[] resultsArray = new string[50];
+ int nodecount = 0;
+ for (int i = 0; i < gbXMLDocs.Count; i++)
+ {
+ nodecount = 0;
+ try
+ {
+ XmlDocument gbXMLTestFile = gbXMLDocs[i];
+ XmlNamespaceManager gbXMLns = gbXMLnsm[i];
+ 
+ XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns);
+ foreach (XmlNode surfaceNode in nodes)
+ {
+ XmlAttributeCollection spaceAtts = surfaceNode.Attributes;
+ foreach (XmlAttribute at in spaceAtts)
+ {
+ if (at.Name == "surfaceType")
+ {
+ string type = at.Value;
+ if (type == "Shade")
+ {
+ nodecount++;
+ }
+ break;
+ }
+ }
+ }
+ 
+ //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result
+ resultsArray[i] = nodecount.ToString();
+ if (i % 2 != 0)
+ {
+ //setup standard result and test result
+ report.standResult.Add(resultsArray[i]);
+ report.testResult.Add(resultsArray[i - 1]);
+ report.idList.Add("");
+ 
+ double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)]));
+ if (difference == 0)
+ {
+ report.longMsg = "The Test File's " + report.testType + " matches the Standard File exactly, the difference is zero.";
+ report.passOrFail = true;
+ return report;
+ }
+ else if (difference <= report.tolerance)
+ {
+ report.longMsg = "The Test File's " + report.testType + " matches Standard File within the allowable tolerance, the difference between the two files is " + report.tolerance.ToString() + " " + Units;
+ report.passOrFail = true;
+ return report;
+ }
+ else
+ {
+ report.longMsg = "The Test File's " + report.testType + " does not match Standard File, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference +
+ ". 
" + resultsArray[i] + " shading surfaces in the standard file and " + resultsArray[i - 1] + " shading surfaces in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + //created July 2016 by Chien Si Harriman + public static bool GetShadeSurfaceCount(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr) + { + + + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "Shade") + { + nodecount++; + } + break; + } + } + } + + if (i % 2 == 0) //this should never fail, but it will if orders are reversed. Have to be careful. + { + cr.SurfacesSummary.Find(x => x.FileType == "Test").NumberOfShades = nodecount; + } + else + { + cr.SurfacesSummary.Find(x => x.FileType == "Standard").NumberOfShades = nodecount; + } + if (i % 2 != 0) + { + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Shading device count matches the Standard File exactly, the difference is zero."); + } + else + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Shading device count does not match the Standard File exactly."); + } + + } + else { continue; } + + } + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal(" Failed to complete Shading Device Count."); + return false; + } + } + return true; + } + + // + //Written Jan 31, 2013 by Chien Si Harriman, Senior Product Manager, Carmel Software Corporation + //A simple method that reports the number of surface elements whose surfaceType attribute = "Air" in a test file and standard file + //If the number of surface elements is not the same, the method returns false and displays the difference in the number of surfaces. + public static DOEgbXMLReportingObj GetAirSurfaceCount(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Mar 14 2013 + report.testSummary = "This test compares the total number of Surface elements with the SurfaceType=\"Air\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this Surface Type in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. 
In other words, the surface counts are the same, or the test fails."; + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + report.testSummary += " You may notice that this test has failed, but overall your file has passed. This is because the surface"; + report.testSummary += " count may not be a perfect indicator of accuracy. So overall, the test summary may show \"Pass\" even though this test failed."; + report.testSummary += " Refer to the pass/fail summary sheet for more information."; + + report.unit = Units; + + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "Air") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + report.longMsg = "The Test File's" + report.testType + " matches the Standard File exactly, the difference is zero."; + report.passOrFail = true; + return report; + } + else if (difference <= report.tolerance) + { + report.longMsg = "The Test File's " + report.testType + " matches Standard File within the allowable tolerance, the difference between the two files is " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The Test File's " + report.testType + " does not match Standard File, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". 
" + resultsArray[i] + " air surfaces in the standard file and " + resultsArray[i - 1] + " air surfaces in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + public static bool GetAirSurfaceCount(List gbXMLDocs, List gbXMLnsm, ref CampusReport cr) + { + + + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "surfaceType") + { + string type = at.Value; + if (type == "Air") + { + nodecount++; + } + break; + } + } + } + + if (i % 2 == 0) //this should never fail, but it will if orders are reversed. Have to be careful. + { + cr.SurfacesSummary.Find(x => x.FileType == "Test").NumberOfAirSurfaces = nodecount; + } + else + { + cr.SurfacesSummary.Find(x => x.FileType == "Standard").NumberOfAirSurfaces = nodecount; + } + if (i % 2 != 0) + { + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference == 0) + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Air surfaces count matches the Standard File exactly, the difference is zero."); + } + else + { + logger.Info("PROGRAMMER'S NOTE: The Test File's Air surfaces count does not match the Standard File exactly."); + } + + } + else { continue; } + + } + catch (Exception e) + { + logger.Debug(e.ToString()); + logger.Fatal(" Failed to complete Air surfaces Count."); + return false; + } + } + return true; + } + // + //Written Jan 31, 2013 by Chien Si Harriman, Senior Product Manager, Carmel Software Corporation + //This method will take each surface element and convert the xml language into an instance of a SurfaceDefinition + //Each surface is converted in this way, with the resulting instance being stored in a list that is returned for later use + //----------------------

+ //This is an important method because it stores all of the information about a surface in a gbXML file in a list + //This list can later be recalled to perform analytics on the surfaces and the data contained within + private static List GetFileSurfaceDefs(XmlDocument xmldoc, XmlNamespaceManager xmlns) + { + List surfaces = new List(); + try + { + + XmlNodeList nodes = xmldoc.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface", xmlns); + foreach (XmlNode surfaceNode in nodes) + { + //initialize a new instance of the class + SurfaceDefinitions surfDef = new SurfaceDefinitions(); + surfDef.AdjSpaceId = new List(); + surfDef.PlCoords = new List(); + + //get id and surfaceType + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "id") + { + surfDef.SurfaceId = at.Value; + } + else if (at.Name == "surfaceType") + { + surfDef.SurfaceType = at.Value; + } + } + if (surfaceNode.HasChildNodes) + { + XmlNodeList surfChildNodes = surfaceNode.ChildNodes; + foreach (XmlNode node in surfChildNodes) + { + if (node.Name == "AdjacentSpaceId") + { + XmlAttributeCollection adjSpaceIdAt = node.Attributes; + foreach (XmlAttribute at in adjSpaceIdAt) + { + if (at.Name == "spaceIdRef") + { + surfDef.AdjSpaceId.Add(at.Value); + } + } + } + else if (node.Name == "RectangularGeometry") + { + if (node.HasChildNodes) + { + XmlNodeList rectGeomChildren = node.ChildNodes; + foreach (XmlNode rgChildNode in rectGeomChildren) + { + if (rgChildNode.Name == "Azimuth") { surfDef.Azimuth = Convert.ToDouble(rgChildNode.InnerText); } + else if (rgChildNode.Name == "CartesianPoint") + { + if (rgChildNode.HasChildNodes) + { + Vector.CartCoord cd = new Vector.CartCoord(); + XmlNodeList coordinates = rgChildNode.ChildNodes; + int pointCount = 1; + foreach (XmlNode coordinate in coordinates) + { + switch (pointCount) + { + case 1: + cd.X = Convert.ToDouble(coordinate.InnerText); + break; + case 2: + cd.Y = Convert.ToDouble(coordinate.InnerText); + break; + case 3: + cd.Z = Convert.ToDouble(coordinate.InnerText); + break; + } + pointCount++; + } + surfDef.InsertionPoint = new VectorMath.Vector.MemorySafe_CartCoord(cd.X, cd.Y, cd.Z); + } + } + else if (rgChildNode.Name == "Tilt") { surfDef.Tilt = Convert.ToDouble(rgChildNode.InnerText); } + else if (rgChildNode.Name == "Height") { surfDef.Height = Convert.ToDouble(rgChildNode.InnerText); } + else if (rgChildNode.Name == "Width") { surfDef.Width = Convert.ToDouble(rgChildNode.InnerText); } + } + } + } + else if (node.Name == "PlanarGeometry") + { + XmlNode polyLoop = node.FirstChild; + if (polyLoop.HasChildNodes) + { + XmlNodeList cartesianPoints = polyLoop.ChildNodes; + foreach (XmlNode coordinatePt in cartesianPoints) + { + Vector.CartCoord coord = new Vector.CartCoord(); + if (coordinatePt.HasChildNodes) + { + XmlNodeList coordinates = coordinatePt.ChildNodes; + int pointCount = 1; + foreach (XmlNode coordinate in coordinatePt) + { + + switch (pointCount) + { + case 1: + coord.X = Convert.ToDouble(coordinate.InnerText); + break; + case 2: + coord.Y = Convert.ToDouble(coordinate.InnerText); + break; + case 3: + coord.Z = Convert.ToDouble(coordinate.InnerText); + break; + } + pointCount++; + } + + surfDef.PlCoords.Add(new Vector.MemorySafe_CartCoord(coord.X, coord.Y, coord.Z)); + } + } + } + } + } + } + Vector.MemorySafe_CartVect plRHRVect = GetPLRHR(surfDef.PlCoords); + surfDef.PlRHRVector = new Vector.MemorySafe_CartVect(plRHRVect.X,plRHRVect.Y,plRHRVect.Z); + surfaces.Add(surfDef); + } + return surfaces; 
+ } + catch (Exception e) + { + return surfaces; + } + + } + + private static Vector.MemorySafe_CartVect GetPLRHR(List plCoords) + { + Vector.CartVect plRHRVect = new Vector.CartVect(); + //this list will store all of the rhr values returned by any arbitrary polyloop + List RHRs = new List(); + + int coordCount = plCoords.Count; + for (int i = 0; i < coordCount - 2; i++) + { + Vector.CartVect v1 = Vector.CreateVector(plCoords[i], plCoords[i + 1]); + Vector.CartVect v2 = Vector.CreateVector(plCoords[i + 1], plCoords[i + 2]); + Vector.CartVect uv = Vector.UnitVector(Vector.CrossProduct(v1, v2)); + RHRs.Add(uv); + } + int RHRVectorCount = RHRs.Count; + List distinctRHRs = new List(); + //the Distinct().ToList() routine did not work because, we believe, the item in the list is not recognized by Distinct() + //distinctRHRs = RHRs.Distinct().ToList(); + //so we took the following approach to try and find unique vectors and store them + distinctRHRs.Add(RHRs[0]); + for (int j = 1; j < RHRVectorCount; j++) + { + foreach (Vector.CartVect distinctVector in distinctRHRs) + { + //this could contain wacky RHRs that are removed below + if (RHRs[j].X != distinctVector.X && RHRs[j].Y != distinctVector.Y && RHRs[j].Z != distinctVector.Z) + { + distinctRHRs.Add(RHRs[j]); + } + } + } + + int RHRDistinctVectCount = distinctRHRs.Count; + if (RHRDistinctVectCount == 1) + { + plRHRVect = distinctRHRs[0]; + return new Vector.MemorySafe_CartVect(plRHRVect.X,plRHRVect.Y,plRHRVect.Z); + } + else + { + Dictionary uniqueVectorCount = new Dictionary(); + //determine which vector shows up the most often + foreach (Vector.CartVect distinctVector in distinctRHRs) + { + int count = 0; + foreach (Vector.CartVect vect in RHRs) + { + if (distinctVector.X == vect.X && distinctVector.Y == vect.Y && distinctVector.Z == vect.Z) + { + count++; + } + } + uniqueVectorCount.Add(count, distinctVector); + } + + //returns the vector that has the largest count + //get the largest integer in the list of + //may also be able to use + //uniqueVectorCount.Keys.Max(); + List keysList = uniqueVectorCount.Keys.ToList(); + keysList.Sort(); + int max = 0; + + foreach (int key in keysList) + { + if (key > max) { max = key; } + } + plRHRVect = uniqueVectorCount[max]; + return new Vector.MemorySafe_CartVect(plRHRVect.X,plRHRVect.Y,plRHRVect.Z); + } + } + + //Created July 2016 by Chien Si Harriman. Note the area tolerance checks are based on percentage tolerances and not absolute tolerances. + private void GetSurfaceMatches(SurfaceDefinitions surface, List TestSurfaces, ref DetailedSurfaceSummary ssSummary,Conversions.lengthUnitEnum standardLengthUnits, Conversions.lengthUnitEnum testLengthUnits, double testlengthConversion, double standardlengthConversion, Conversions.areaUnitEnum standardAreaUnits, Conversions.areaUnitEnum testAreaUnits, double testareaConversion, double standardareaConversion) + { + try{ + List possiblesList1 = new List(); + List possiblesList2 = new List(); + + bool adjSpaceIdMatch = false; + bool isLowTiltObject = false; + bool isHighTiltObject = false; + bool interiorWallFlipped = false; + bool issurfaceRegular = false; + bool istestSurfaceRegular = false; + //try to find a surface in the test file that has the same: + //adjacent space Id signature + //surfaceType + //free list is 1 + //list 2 is not used + ssSummary.ID = surface.SurfaceId; + ssSummary.AreaUnits = "SquareFeet"; //TODO, try to remove this hardcoding. 
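+ //The matching below proceeds in stages: candidate test surfaces are first filtered by AdjacentSpaceId
+ //signature and surfaceType, then by tilt and azimuth against the DOEgbXMLBasics.Tolerances values, and then
+ //by polyloop coordinates. As noted in the method comment, area comparisons are percentage-based rather than
+ //absolute: |test - standard| / standard is checked against the tolerance, so (for example) a test area of
+ //1008 against a standard area of 1000 differs by 0.008 (0.8%) and would fall inside a 1% tolerance.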
+ ssSummary.TotalSurfaceArea = GetSurfaceArea(surface,standardareaConversion); +#region + logger.Info("SURFACE ID: " + surface.SurfaceId); + logger.Info("START SUBTEST: AdjacencyId check."); + for(int ts = 0; ts 1) + { + logger.Info("TEST SUMMARY: Based on a comparison of the surface Type and Adjacent SpaceIds, there are " + possiblesList1.Count.ToString() + " surfaces in the test file that are possible matches for " + surface.SurfaceId + " of the Standard File."); + } + else + { + logger.Error("TEST SUMMARY: In the vendor test file, no matches could be found for this surface that have the same AdjacentSpaceId(s) and SurfaceType."); + ssSummary.FoundMatch = false; + return; + } +#endregion + //there is at least one surface that matches the above criteria + //now checking for tilt and azimuth criteria, as these have to match + //TODO: consider removing, minor clean up + if (possiblesList1.Count > 0) + { + logger.Info("START SUBTEST: Azimuth and Tilt check."); + foreach (SurfaceDefinitions testSurface in possiblesList1) + { + double tiltDifference = 0; + double azimuthDifference = Math.Abs(testSurface.Azimuth - surface.Azimuth); +#region + if(isLowTiltObject) + { + if(IsLowTiltSurface(testSurface)) //they are the same, both have small tilts + { + tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt); + } + else //they are 180 degrees different, and the test surface is a high tilt while the standard is low tilt + { + if (testSurface.SurfaceType == "InteriorFloor") + { + tiltDifference = Math.Abs(Math.Abs(testSurface.Tilt - 180) - surface.Tilt); + } + else + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH FAILED"); + logger.Info("PROGRAMMER'S NOTE: Expecting test surface type to be Interior Floor"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + logger.Info("TEST SURFACE: "+testSurface.SurfaceId + " has been removed as a candidate for matching."); + continue; + } + } + + //no azimuth tests for horizontal surfaces + if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance) + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH FAILED"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching."); + continue; + } + //if the is within tolerance + else + { + //if the surface is horizontal, just add to the free List because we don't check for azimuth in this case + if (surface.Tilt == 0) + { + possiblesList2.Add(testSurface); + if (tiltDifference == 0) + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH SUCCESS:PERFECT"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + } + else + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH SUCCESS"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + 
logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + } + } + else + { + logger.Info("START SUBTEST: azimuth checks."); + //check the azimuth + if (azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance) + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH FAILED"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching."); + continue; + } + //if the tilt and azimuth is within tolerance + else + { + //add to the free List + possiblesList2.Add(testSurface); + if (tiltDifference == 0 && azimuthDifference == 0) + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH SUCCESS:PERFECT"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + } + else + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH SUCCESS"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + } + } + } + } + } + else if (isHighTiltObject) + { + if(IsHighTiltSurface(testSurface)) //both high tilt interior surfaces + { + tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt); + } + else //standard is high tilt, test is low tilt + { + if(testSurface.SurfaceType == "Ceiling") + { + tiltDifference = Math.Abs(Math.Abs(testSurface.Tilt - 180) - surface.Tilt); + } + else + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH FAILED"); + logger.Info("PROGRAMMER' NOTE: Expected surfaceType to be Ceiling."); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching."); + continue; + } + } + + //no azimuth tests + if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance) //azimuth no longer matters for these surfaces + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH FAILED"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching."); + continue; + } + //if the tilt and azimuth is within tolerance + else + { + //if the surface is horizontal, just add to the free List because we don't check for azimuth in this case + if (surface.Tilt == 180) + { + possiblesList2.Add(testSurface); + if (tiltDifference == 0) + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH SUCCESS:PERFECT"); + logger.Debug("Test 
file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + } + else + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH PERFECT"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + } + } + else + { + //check the azimuth + if (azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance) + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH FAILED"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching."); + continue; + } + //if the tilt and azimuth is within tolerance + else + { + //add to the free List + possiblesList2.Add(testSurface); + if (tiltDifference == 0 && azimuthDifference == 0) + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH SUCCESS:PERFECT"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + } + else + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH SUCCESS"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + } + } + } + } + } +#endregion //the surface is neither a ceiling nor a floor, it is just something regular + else + { + azimuthDifference = Math.Abs(testSurface.Azimuth - surface.Azimuth); + if (interiorWallFlipped) //both high tilt interior surfaces + { + azimuthDifference = Math.Abs(Math.Abs(testSurface.Azimuth - surface.Azimuth) - 180); //180 is needed because they should be separated by 180 + } + tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt); + + //if the tilt and azimuth is outside of tolerance + if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance || azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance) + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt Or Azimuth MATCH FAILED"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching."); + continue; + } + //if the tilt and azimuth is within tolerance + else + { + //add to the free List + possiblesList2.Add(testSurface); + if (tiltDifference == 0 && azimuthDifference == 0) + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt And Azimuth MATCH SUCCESS:PERFECT"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + 
testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + } + else + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt And Azimuth MATCH SUCCESS"); + logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + } + } + } + logger.Info("END SUBTEST: Azimuth and Tilt check."); + } + } + // //report to the user that no matches could be found + else + { + logger.Error("TEST SUMMARY: In the test file, surfaces could be found that match the standard file's AdjacentSpaceId and SurfaceType, but of these matches, none could be identified that also have a tilt or azimuth that exactly matches the standard file's, or is within the allowable tolerance."); + ssSummary.FoundMatch = false; + return; + } + + //clear the first list + possiblesList1.Clear(); + //start to loop through the new refined list + //generally want to look at the polyLoop coordinates + //list 2 is analyzed + //list 1 is free + ; + if (possiblesList2.Count > 0) + { + //simple method from this point forward is just to simply start doing a polyloop check + //check the standard surface PolyLoop and the test Surface(s) polyloop(s) + //check the absolute coordinates of the testSurface(s) polyloop(s) + + if (possiblesList2.Count == 1) + { + logger.Info("PROGRAMMER'S INFO: Only one Surface Candidate remaining."); + //meaning there is only one candidate still available + //go on to test the polyLoop coordinates and the insertion point + possiblesList1.Add(possiblesList2[0]); + //this should mean theoretically that you can do a one for one comparison and do the simplistic check + possiblesList2.Clear(); + //polyLoop absolute coordinates + //list 1 is analyzed + //list 2 is free + logger.Info("START SUBTEST: PolyLoop coordinate checks."); + #region + if (possiblesList1.Count > 0) + { + foreach (SurfaceDefinitions testSurface in possiblesList1) + { + //check the polyLoop coordinates + bool coordsMatch = false; + foreach (Vector.MemorySafe_CartCoord standardPolyLoopCoord in surface.PlCoords) + { + coordsMatch = GetPolyLoopCoordMatch(standardPolyLoopCoord, testSurface, surface.SurfaceId, testlengthConversion, standardlengthConversion); + if (coordsMatch) + { + continue; + } + else + { + logger.Info("TEST SURFACE: "+testSurface.SurfaceId+ " polyloop coordinate MATCH FAILED. It has been removed from the candidate list."); + break; + } + } + if (coordsMatch) + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " polyloop coordinate MATCH SUCCESS."); + possiblesList2.Add(testSurface); + } + } + } + else + { + logger.Error("TEST SUMMARY: In the test file, no surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, and Surface Area. Failed when attempting to match its polyloop coordinates."); + ssSummary.FoundMatch = false; + return; + } + logger.Info("END SUBTEST: PolyLoop coordinate checks."); + #endregion + possiblesList1.Clear(); + + issurfaceRegular = IsSurfaceRegular(surface); + //Insertion point tests. 
+ if (!isHighTiltObject && !isLowTiltObject && issurfaceRegular) //no point in doing these checks if thing is not square and regular + #region + { + logger.Info("PROGRAMMER'S NOTE: Standard Surface is square or rectangle non-horizontal. Assumption that test surface candidate should also be same type of shape."); + logger.Info("START SUBTEST: Insertion Point Coordinate check."); + if (possiblesList2.Count > 0) + { + //check the insertion point coordinate + foreach (SurfaceDefinitions testSurface in possiblesList2) + { + //now match the differences + double insPtXDiff = Math.Abs((testSurface.InsertionPoint.X * testlengthConversion) - (surface.InsertionPoint.X * standardlengthConversion)); + double insPtYDiff = Math.Abs((testSurface.InsertionPoint.Y * testlengthConversion) - (surface.InsertionPoint.Y * standardlengthConversion)); + double insPtZDiff = Math.Abs((testSurface.InsertionPoint.Z * testlengthConversion) - (surface.InsertionPoint.Z * standardlengthConversion)); + //TODO: this interior flipped algorithm could be improved vastly. How to tell if in lower left has not been solved. + if (interiorWallFlipped) + { + logger.Info("PROGRAMMER'S NOTE: The azimuths are flipped. Adjusting the insertion point test to factor this into account.."); + //find the complimenting insertion point + for (int pt = 0; pt < testSurface.PlCoords.Count; pt++) + { + if (Math.Abs((surface.InsertionPoint.Z * standardlengthConversion) - (testSurface.PlCoords[pt].Z * testlengthConversion)) < DOEgbXMLBasics.Tolerances.SurfaceInsPtZTolerance) + { + if (Math.Abs((surface.InsertionPoint.X * standardlengthConversion) - testSurface.PlCoords[pt].X * testlengthConversion) < DOEgbXMLBasics.Tolerances.SurfaceInsPtXTolerance) + { + if (Math.Abs((surface.InsertionPoint.Y * standardlengthConversion) - testSurface.PlCoords[pt].Y * testlengthConversion) < DOEgbXMLBasics.Tolerances.SurfaceInsPtYTolerance) + { + //a match + insPtXDiff = Math.Abs((testSurface.PlCoords[pt].X * testlengthConversion) - (surface.InsertionPoint.X * standardlengthConversion)); + insPtYDiff = Math.Abs((testSurface.PlCoords[pt].Y * testlengthConversion) - (surface.InsertionPoint.Y * standardlengthConversion)); + insPtZDiff = Math.Abs((testSurface.PlCoords[pt].Z * testlengthConversion) - (surface.InsertionPoint.Z * standardlengthConversion)); + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + "Insertion Point MATCH SUCCESS."); + possiblesList1.Add(testSurface); + break; + } + else + { + //didn't find a candidate + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + "Insertion Point did not Match."); + } + } + } + } + } + else + { + if (insPtXDiff > DOEgbXMLBasics.Tolerances.SurfaceInsPtXTolerance || insPtYDiff > DOEgbXMLBasics.Tolerances.SurfaceInsPtYTolerance || insPtZDiff > DOEgbXMLBasics.Tolerances.SurfaceInsPtZTolerance) + { + logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Insertion Point MATCH FAILED. 
It has been removed as a candidate.");
+ continue;
+ }
+ else
+ {
+ //possible match
+ if (insPtXDiff == 0 && insPtYDiff == 0 && insPtZDiff == 0)
+ {
+ //perfect match
+ logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Insertion Point MATCH SUCCESS:PERFECT");
+ possiblesList1.Add(testSurface);
+ }
+ else
+ {
+ logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Insertion Point MATCH SUCCESS");
+ possiblesList1.Add(testSurface);
+ }
+ }
+ }
+ }
+ else
+ {
+ logger.Error("TEST SUMMARY: In the test file, no surfaces could be found that match standard file's Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, and PolyLoop Coordinates. Failed when matching PolyLoop coordinates.");
+ ssSummary.FoundMatch = false;
+ return;
+ }
+ possiblesList2.Clear();
+ logger.Info("END SUBTEST: Insertion point coordinate check.");
+ 
+ if (possiblesList1.Count == 1)
+ {
+ logger.Info("TEST SUMMARY: MATCH SUCCESS for Standard file surface id: " + surface.SurfaceId + " in the test file. Only one match was found to be within all the tolerances allowed.");
+ List<string> testFileSurfIds = new List<string>();
+ foreach (SurfaceDefinitions surf in possiblesList1) { testFileSurfIds.Add(surf.SurfaceId); }
+ 
+ globalMatchObject.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds);
+ ssSummary.FoundMatch = true;
+ ssSummary.TestSurfaceIDs = testFileSurfIds;
+ ssSummary.TotalTestSurfaceArea = GetSurfaceArea(possiblesList1[0],testareaConversion);
+ 
+ return;
+ }
+ else if (possiblesList1.Count == 0)
+ {
+ logger.Error("TEST SUMMARY: No surfaces could be found that match standard file's Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, PolyLoop Coordinates, and Insertion Point. Failed when attempting to match the insertion point coordinates.");
+ ssSummary.FoundMatch = false;
+ return;
+ }
+ else if (possiblesList1.Count > 1)
+ {
+ logger.Error("TEST SUMMARY: Advanced Surface Test found more than one match for Standard file surface id: " + surface.SurfaceId + " in the test file. It was not possible to determine only one unique surface.");
+ ssSummary.FoundMatch = false;
+ return;
+ }
+ 
+ }
+ #endregion
+ else
+ {
+ if (possiblesList2.Count == 1) //we do not check insertion points for horizontal surfaces.
+ {
+ possiblesList1 = possiblesList2; //this is just to keep the below code consistent with convention.
+ 
+ logger.Info("TEST FILE SUCCESS: for Standard file surface id: " + surface.SurfaceId + " in the test file. Only one match was found to be within all the tolerances allowed.");
+ List<string> testFileSurfIds = new List<string>();
+ foreach (SurfaceDefinitions surf in possiblesList1) { testFileSurfIds.Add(surf.SurfaceId); }
+ 
+ globalMatchObject.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds);
+ ssSummary.FoundMatch = true;
+ ssSummary.TestSurfaceIDs = testFileSurfIds;
+ ssSummary.TotalTestSurfaceArea = GetSurfaceArea(possiblesList1[0],testareaConversion);
+ 
+ return;
+ }
+ else if (possiblesList2.Count == 0)
+ {
+ logger.Error("TEST FILE FAILURE: No surfaces could be found that match standard file's Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, PolyLoop Coordinates, and Insertion Point. Failed when attempting to match the insertion point coordinates.");
+ ssSummary.FoundMatch = false;
+ return;
+ }
+ else if (possiblesList2.Count > 1)
+ {
+ logger.Error("TEST FILE FAILURE: Advanced Surface Test found more than one match for Standard file surface id: " + surface.SurfaceId + " in the test file. It was not possible to determine only one unique surface.");
+ ssSummary.FoundMatch = false;
+ return;
+ }
+ }
+ }
+ //more than one candidate still exists even after the adjacency test, surfaceType test, and tilt and azimuth tests, so filter through
+#region
+ else
+ {
+ //check to see if the remaining area sum matches the standard surface area,
+ //and that the edges of the test surfaces do not overlap, etc.
+ //first try to find if the standard file has a regular rectangular or square profile
+ logger.Debug("More than one surface remains in the test subset.");
+ logger.Info("PROGRAMMER'S NOTE: Performing advanced surface boundary tests.");
+ //checks to see if the testSurface vertices all lie within the standard surface polygonal boundary
+ foreach(SurfaceDefinitions testSurface in possiblesList2)
+ {
+ logger.Info("Testing test surface " +testSurface.SurfaceId);
+ if(DoesSurfaceContainSurface(surface,testSurface,testlengthConversion,standardlengthConversion))
+ {
+ possiblesList1.Add(testSurface);
+ }
+ }
+ 
+ //now we check to see which of the remaining surfaces and their edges form a coherent surface.
+ //do their edges overlap? is the polygon self-intersecting?
+ //add the surface to the existing possibles list
+ possiblesList1.Insert(0, surface);
+ var edgeDict = FindMatchingEdges(possiblesList1);
+ if(EdgesAreValid(edgeDict))
+ {
+ //finally, we see if the total area of the remaining surfaces equals the area of the standard surface. If all of the above has passed, it should not be an issue.
+ double standardArea = ssSummary.TotalSurfaceArea;
+ double testArea = 0;
+ //remove the zero index surface, because this is the standard surface
+ possiblesList1.RemoveAt(0);
+ //these are the remaining candidates
+ foreach(var ts in possiblesList1)
+ {
+ testArea += GetSurfaceArea(ts,testareaConversion);
+ }
+ 
+ if(Math.Abs(standardArea - testArea)/standardArea < DOEgbXMLBasics.Tolerances.AreaPercentageTolerance)
+ {
+ logger.Info("TEST FILE SUCCESS: for Standard file surface id: " + surface.SurfaceId + " in the test file. The wall candidates remaining meet the allowable geometry constraints.");
+ List<string> testFileSurfIds = new List<string>();
+ foreach (SurfaceDefinitions surf in possiblesList1) { testFileSurfIds.Add(surf.SurfaceId); }
+ ssSummary.FoundMatch = true;
+ ssSummary.TestSurfaceIDs = testFileSurfIds;
+ ssSummary.TotalTestSurfaceArea = testArea;
+ }
+ else
+ {
+ logger.Info("TEST FILE FAILURE: for Standard file surface id: " + surface.SurfaceId + " in the test file. The wall candidates remaining did not pass the area test.");
+ }
+ }
+ else
+ {
+ //problem
+ logger.Info("TEST FILE FAILURE: for Standard file surface id: " + surface.SurfaceId + " in the test file. The wall candidates remaining do not meet the allowable geometry constraints.");
+ List<string> testFileSurfIds = new List<string>();
+ foreach (SurfaceDefinitions surf in possiblesList1) { testFileSurfIds.Add(surf.SurfaceId); }
+ 
+ globalMatchObject.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds);
+ ssSummary.FoundMatch = false;
+ ssSummary.TestSurfaceIDs = testFileSurfIds;
+ }
+ 
+ }
+#endregion
+ }
+ }
+ catch(Exception e){
+ logger.Fatal(e.ToString());
+ return;
+ }
+ 
+ }
+ 
+ public bool EdgesAreValid(Dictionary<int, DOEgbXMLBasics.EdgeFamily> edges)
+ {
+ //stores the findings of each unique edge test
+ Dictionary<int, bool> edgeResults = new Dictionary<int, bool>();
+ foreach(KeyValuePair<int, DOEgbXMLBasics.EdgeFamily> kp in edges)
+ {
+ var edge = kp.Value;
+ var lengthTol = DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance;
+ Vector.MemorySafe_CartVect edgeVec = Vector.CreateMemorySafe_Vector(edge.startendpt[0], edge.startendpt[1]);
+ double edgeMag = Vector.VectorMagnitude(edgeVec);
+ double edgesLength = 0;
+ for(int re = 0; re< edge.relatedEdges.Count(); re++)
+ {
+ Vector.MemorySafe_CartVect e = Vector.CreateMemorySafe_Vector(edge.relatedEdges[re].startendpt[0], edge.relatedEdges[re].startendpt[1]);
+ double mag = Vector.VectorMagnitude(e);
+ edgesLength += mag;
+ }
+ if(Math.Abs(edgesLength - edgeMag) < lengthTol)
+ {
+ //check for the case where the edge's endpoints should match nearly exactly
+ if(edge.relatedEdges.Count() == 1)
+ {
+ double diffx = Math.Abs(edge.startendpt[0].X - edge.relatedEdges[0].startendpt[0].X);
+ double diffy = Math.Abs(edge.startendpt[0].Y - edge.relatedEdges[0].startendpt[0].Y);
+ double diffz = Math.Abs(edge.startendpt[0].Z - edge.relatedEdges[0].startendpt[0].Z);
+ if(diffx <= lengthTol && diffy <= lengthTol && diffz <= lengthTol)
+ {
+ diffx = Math.Abs(edge.startendpt[1].X - edge.relatedEdges[0].startendpt[1].X);
+ diffy = Math.Abs(edge.startendpt[1].Y - edge.relatedEdges[0].startendpt[1].Y);
+ diffz = Math.Abs(edge.startendpt[1].Z - edge.relatedEdges[0].startendpt[1].Z);
+ if(diffx <= lengthTol && diffy <= lengthTol && diffz <= lengthTol)
+ {
+ //edges match
+ edgeResults[kp.Key] = true;
+ }
+ }
+ 
+ diffx = Math.Abs(edge.startendpt[1].X - edge.relatedEdges[0].startendpt[0].X);
+ diffy = Math.Abs(edge.startendpt[1].Y - edge.relatedEdges[0].startendpt[0].Y);
+ diffz = Math.Abs(edge.startendpt[1].Z - edge.relatedEdges[0].startendpt[0].Z);
+ if (diffx <= lengthTol && diffy <= lengthTol && diffz <= lengthTol)
+ {
+ diffx = Math.Abs(edge.startendpt[0].X - edge.relatedEdges[0].startendpt[1].X);
+ diffy = Math.Abs(edge.startendpt[0].Y - edge.relatedEdges[0].startendpt[1].Y);
+ diffz = Math.Abs(edge.startendpt[0].Z - edge.relatedEdges[0].startendpt[1].Z);
+ if (diffx <= lengthTol && diffy <= lengthTol && diffz <= lengthTol)
+ {
+ //edges match
+ edgeResults[kp.Key] = true;
+ }
+ }
+ }
+ else
+ {
+ //there is more than one related edge for the given edge
+ int validRelatedEdgesCount = 0;
+ List<DOEgbXMLBasics.EdgeFamily> middleEdges = new List<DOEgbXMLBasics.EdgeFamily>();
+ DOEgbXMLBasics.EdgeFamily startEdge = new DOEgbXMLBasics.EdgeFamily();
+ DOEgbXMLBasics.EdgeFamily endEdge = new DOEgbXMLBasics.EdgeFamily();
+ //perform a standard parametric line check
+ 
+ //this finds the indices of the related edges that share the start and end point of the edge
+ var edgeStartPt = edge.startendpt[0];
+ var edgeEndPt = edge.startendpt[1];
+ for(int re = 0; re< edge.relatedEdges.Count(); re++)
+ {
+ var relatedEdge = edge.relatedEdges[re];
+ double startdX = Math.Abs(relatedEdge.startendpt[0].X - edgeStartPt.X);
+ double startdY = Math.Abs(relatedEdge.startendpt[0].Y - edgeStartPt.Y);
+ double startdZ = 
Math.Abs(relatedEdge.startendpt[0].Z - edgeStartPt.Z); + if(startdX <= lengthTol && startdY <= lengthTol && startdZ <= lengthTol) + { + startEdge = relatedEdge; + continue; + } + startdX = Math.Abs(relatedEdge.startendpt[1].X - edgeStartPt.X); + startdY = Math.Abs(relatedEdge.startendpt[1].Y - edgeStartPt.Y); + startdZ = Math.Abs(relatedEdge.startendpt[1].Z - edgeStartPt.Z); + if (startdX <= lengthTol && startdY <= lengthTol && startdZ <= lengthTol) + { + startEdge = relatedEdge; + continue; + } + double enddX = Math.Abs(relatedEdge.startendpt[1].X - edgeEndPt.X); + double enddY = Math.Abs(relatedEdge.startendpt[1].Y - edgeEndPt.Y); + double enddZ = Math.Abs(relatedEdge.startendpt[1].Z - edgeEndPt.Z); + if (enddX <= lengthTol && enddY <= lengthTol && enddZ <= lengthTol) + { + endEdge = relatedEdge; + continue; + } + enddX = Math.Abs(relatedEdge.startendpt[0].X - edgeEndPt.X); + enddY = Math.Abs(relatedEdge.startendpt[0].Y - edgeEndPt.Y); + enddZ = Math.Abs(relatedEdge.startendpt[0].Z - edgeEndPt.Z); + if (enddX <= lengthTol && enddY <= lengthTol && enddZ <= lengthTol) + { + endEdge = relatedEdge; + continue; + } + middleEdges.Add(relatedEdge); + } + + //now I should have a startEdge, middleEdges, and an end Edge. It is legal for there to be no middleEdges + //it is illegal for the start and end edge to be undefined. + if(startEdge.relatedEdges.Count == 0 || endEdge.relatedEdges.Count == 0) + { + logger.Info("Something"); + return false; + } + //an algorithm to make sure all the edges line up well. We should have already established that they are parallel when finding edges previously. + if(middleEdges.Count == 0) + { + if(EdgesShareVertex(startEdge,endEdge)) + { + //this should be a sufficient test, because + //the edges are parallel, the start edge is at the start vertex , the end edge is at the other vertex + //there are no middle edges + //the sum of these two edges is equal to the length of the edge in the keyvalue pair. 
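+ //Illustration (hypothetical numbers): for a 10-unit edge whose two related edges have lengths 4 and 6 and meet at a
+ //shared vertex, |4 + 6 - 10| is below SurfacePLCoordTolerance and the edge is marked valid below; if the related
+ //edges summed to, say, 9 units, the outer length check would already have routed this edge to the TBD branch instead.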
+ edgeResults[kp.Key] = true; + } + else + { + edgeResults[kp.Key] = false; + } + } + else + { + //there are middle edges + int validRelatedEdgesCt = 0; + //one simple way to do this is to check to see if all vertices can find a counterpart + for(int me = 0 ; me < middleEdges.Count();me++) + { + int validatedVertices = 0; + foreach(var pt in middleEdges[me].startendpt) + { + if (EdgesShareVertex(pt, startEdge)) { validatedVertices++; validRelatedEdgesCount++; } //we add here because startedge is now completely valid + if (EdgesShareVertex(pt, endEdge)) { validatedVertices++; } + if (FoundVertexMatch(pt, middleEdges)) { validatedVertices++; validRelatedEdgesCount++; } //we add here because endedge is now completely valid + } + if(validatedVertices == 2) { validRelatedEdgesCount++; } + } + if (validRelatedEdgesCt == edge.relatedEdges.Count) { edgeResults[kp.Key] = true; } + else { edgeResults[kp.Key] = false; } + //another method, TBD, is to create a vector with each related edge, find its scalar to the edge, and the scalars should sum to 1 + } + } + } + else + { + //related edges exceed length of the edge + //TBD for future validation exercises + } + } + var keys = edgeResults.Where(x => x.Value==false).Select(x => x.Key); + if (keys.Count() > 0) { return false; } + else { return true; } + + } + + public bool FoundVertexMatch(Vector.MemorySafe_CartCoord vertex, List edges) + { + int vertexMatchCount = 0; + for(int e = 0; e < edges.Count(); e++) + { + if(EdgesShareVertex(vertex,edges[e])) + { + vertexMatchCount++; + } + } + + if(vertexMatchCount == 1) + { + return true; + } + else + { + return false; + } + + } + + public bool EdgesShareVertex(Vector.MemorySafe_CartCoord vertex, DOEgbXMLBasics.EdgeFamily edge) + { + double lengthTol = DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance; + double dx = Math.Abs(vertex.X - edge.startendpt[0].X); + double dy = Math.Abs(vertex.Y - edge.startendpt[0].Y); + double dz = Math.Abs(vertex.Z - edge.startendpt[0].Z); + if(dx <= lengthTol && dy <= lengthTol && dz <= lengthTol) + { + return true; + } + + dx = Math.Abs(vertex.X - edge.startendpt[1].X); + dy = Math.Abs(vertex.Y - edge.startendpt[1].Y); + dz = Math.Abs(vertex.Z - edge.startendpt[1].Z); + + if(dx <= lengthTol && dy <= lengthTol && dz <= lengthTol) + { + return true; + } + + return false; + } + + public bool EdgesShareVertex(DOEgbXMLBasics.EdgeFamily edge, DOEgbXMLBasics.EdgeFamily checkEdge) + { + double lengthTol = DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance; + double startdX = Math.Abs(edge.startendpt[0].X - checkEdge.startendpt[0].X); + double startdY = Math.Abs(edge.startendpt[0].Y - checkEdge.startendpt[0].Y); + double startdZ = Math.Abs(edge.startendpt[0].Z - checkEdge.startendpt[0].Z); + if (startdX <= lengthTol && startdY <= lengthTol && startdZ <= lengthTol) + { + return true; + } + startdX = Math.Abs(edge.startendpt[0].X - checkEdge.startendpt[1].X); + startdY = Math.Abs(edge.startendpt[0].Y - checkEdge.startendpt[1].Y); + startdZ = Math.Abs(edge.startendpt[0].Z - checkEdge.startendpt[1].Z); + if (startdX <= lengthTol && startdY <= lengthTol && startdZ <= lengthTol) + { + return true; + } + double enddX = Math.Abs(edge.startendpt[1].X - checkEdge.startendpt[1].X); + double enddY = Math.Abs(edge.startendpt[1].Y - checkEdge.startendpt[1].Y); + double enddZ = Math.Abs(edge.startendpt[1].Z - checkEdge.startendpt[1].Z); + if (enddX <= lengthTol && enddY <= lengthTol && enddZ <= lengthTol) + { + return true; + } + enddX = Math.Abs(edge.startendpt[1].X - checkEdge.startendpt[0].X); + 
enddY = Math.Abs(edge.startendpt[1].Y - checkEdge.startendpt[0].Y);
+ enddZ = Math.Abs(edge.startendpt[1].Z - checkEdge.startendpt[0].Z);
+ if (enddX <= lengthTol && enddY <= lengthTol && enddZ <= lengthTol)
+ {
+ return true;
+ }
+ return false;
+ }
+ 
+ public static Dictionary<int, DOEgbXMLBasics.EdgeFamily> FindMatchingEdges(List<SurfaceDefinitions> sblist)
+ {
+ 
+ Dictionary<int, DOEgbXMLBasics.EdgeFamily> uniqueedges = new Dictionary<int, DOEgbXMLBasics.EdgeFamily>();
+ int distinctedges = 0;
+ foreach (SurfaceDefinitions sb in sblist)
+ {
+ int coordcount = sb.PlCoords.Count;
+ for (int i = 0; i < coordcount; i++)
+ {
+ //initialize the edge being tested, the test edge
+ DOEgbXMLBasics.EdgeFamily currentedge = new DOEgbXMLBasics.EdgeFamily();
+ currentedge.sbdec = sb.SurfaceId;
+ currentedge.relatedEdges = new List<DOEgbXMLBasics.EdgeFamily>();
+ currentedge.startendpt = new List<Vector.MemorySafe_CartCoord>();
+ if (uniqueedges.Count == 0)
+ {
+ uniqueedges[distinctedges] = currentedge;
+ //get the first coord in this set, and the coord next to it
+ currentedge.startendpt.Add(sb.PlCoords[i]);
+ currentedge.startendpt.Add(sb.PlCoords[i + 1]);
+ distinctedges++;
+ continue;
+ 
+ }
+ //most edges work the same, in terms of the start and end point, except for the last edge (the else case)
+ if (i < coordcount - 1)
+ {
+ currentedge.startendpt.Add(sb.PlCoords[i]);
+ currentedge.startendpt.Add(sb.PlCoords[i + 1]);
+ }
+ else
+ {
+ currentedge.startendpt.Add(sb.PlCoords[i]);
+ currentedge.startendpt.Add(sb.PlCoords[0]);
+ }
+ 
+ //search through existing edges to try and find a perfect match
+ int edgecount = 0; //keeps track of how many guest edges in the dictionary I've searched through
+ foreach (KeyValuePair<int, DOEgbXMLBasics.EdgeFamily> kp in uniqueedges)
+ {
+ 
+ Vector.MemorySafe_CartCoord startpt = kp.Value.startendpt[0];
+ //looking for a perfect match of endpoints. If both match, then the
+ //current edge is added to the current key/value pair's related edges.
+ #region
+ double diffx = Math.Abs(startpt.X - currentedge.startendpt[0].X);
+ double diffy = Math.Abs(startpt.Y - currentedge.startendpt[0].Y);
+ double diffz = Math.Abs(startpt.Z - currentedge.startendpt[0].Z);
+ double tol = DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance;
+ if (diffx <= tol && diffy <= tol && diffz <= tol)
+ {
+ //found at least one perfect coordinate match, try to match the second
+ Vector.MemorySafe_CartCoord endpt = kp.Value.startendpt[1];
+ diffx = Math.Abs(endpt.X - currentedge.startendpt[1].X);
+ diffy = Math.Abs(endpt.Y - currentedge.startendpt[1].Y);
+ diffz = Math.Abs(endpt.Z - currentedge.startendpt[1].Z);
+ if (diffx <= tol && diffy <= tol && diffz <= tol)
+ {
+ //both match, means the match is perfect, so add it to the related edges list
+ kp.Value.relatedEdges.Add(currentedge);
+ break;
+ //key value pair break;
+ }
+ else
+ {
+ //the edge may be unique, though it could still have neighboring relationships
+ //draw vector A
+ double Ax = endpt.X - currentedge.startendpt[1].X;
+ double Ay = endpt.Y - currentedge.startendpt[1].Y;
+ double Az = endpt.Z - currentedge.startendpt[1].Z;
+ Vector.MemorySafe_CartVect A = new Vector.MemorySafe_CartVect(Ax, Ay, Az);
+ double Amag = Vector.VectorMagnitude(A);
+ 
+ //take cross product to see if they are even in same plane
+ double evX = endpt.X - startpt.X;
+ double evY = endpt.Y - startpt.Y;
+ double evZ = endpt.Z - startpt.Z;
+ Vector.MemorySafe_CartVect ev = new Vector.MemorySafe_CartVect(evX, evY, evZ);
+ double evmag = Vector.VectorMagnitude(ev);
+ Vector.MemorySafe_CartVect cross = Vector.CrossProduct(A, ev);
+ //TODO: do we need to make this a unit vector? 
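+ //Illustration (hypothetical values): for A = (2, 0, 0) and ev = (5, 0, 0), A x ev = (0, 0, 0), so the magnitude of
+ //the cross product falls below crossProductTolerance and the collinear-overlap checks below are attempted; a
+ //non-zero cross product means the edges are not parallel and this candidate is skipped. Normalizing A and ev first
+ //would make the tolerance independent of edge length, which is one possible answer to the TODO above.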
+ double crossmag = Vector.VectorMagnitude(cross); + if (Math.Abs(crossmag) < DOEgbXMLBasics.Tolerances.crossProductTolerance) + { + //then we are at least parallel or antiparallel, now see if the point resides on the edge or outside of it + double Bx = startpt.X - currentedge.startendpt[1].X; + double By = startpt.Y - currentedge.startendpt[1].Y; + double Bz = startpt.Z - currentedge.startendpt[1].Z; + Vector.MemorySafe_CartVect B = new Vector.MemorySafe_CartVect(Bx, By, Bz); + double Bmag = Vector.VectorMagnitude(B); + //check to see if the test edge is inside the guest edge + if (Amag < evmag && Bmag < evmag) + { + //this means it lies on the plane at least, so it shares, but it is also still independent because a perfect match wasn't found + kp.Value.relatedEdges.Add(currentedge); + //accumulate its own relationships + currentedge.relatedEdges.Add(kp.Value); + edgecount++; + continue; //continue looping through unique edges, if any + } + + double edgeX = currentedge.startendpt[1].X - currentedge.startendpt[0].X; + double edgeY = currentedge.startendpt[1].Y - currentedge.startendpt[0].Y; + double edgeZ = currentedge.startendpt[1].Z - currentedge.startendpt[0].Z; + Vector.MemorySafe_CartVect edgevec = new Vector.MemorySafe_CartVect(edgeX, edgeY, edgeZ); + double edgemag = Vector.VectorMagnitude(edgevec); + + double Cx = startpt.X - currentedge.startendpt[1].X; + double Cy = startpt.Y - currentedge.startendpt[1].Y; + double Cz = startpt.Z - currentedge.startendpt[1].Z; + Vector.MemorySafe_CartVect C = new Vector.MemorySafe_CartVect(Cx, Cy, Cz); + double Cmag = Vector.VectorMagnitude(C); + + double Dx = endpt.X - currentedge.startendpt[1].X; + double Dy = endpt.Y - currentedge.startendpt[1].Y; + double Dz = endpt.Z - currentedge.startendpt[1].Z; + Vector.MemorySafe_CartVect D = new Vector.MemorySafe_CartVect(Dx, Dy, Dz); + double Dmag = Vector.VectorMagnitude(D); + + if (Dmag < edgemag && Cmag <= edgemag) + { + //this means the test edge is longer than the guest edge, but they overlap + kp.Value.relatedEdges.Add(currentedge); + //the edge is still unique but accumulates a neighbor + currentedge.relatedEdges.Add(kp.Value); + edgecount++; + continue; + } + } + else + { + //this other point isn't relevant, and the edges don't coincide + edgecount++; + continue; + } + } + } //end situation where first points match, next test if end points match + #endregion + else if (Math.Abs(startpt.X-currentedge.startendpt[1].X) < tol && Math.Abs(startpt.Y-currentedge.startendpt[1].Y) DOEgbXMLBasics.Tolerances.crossProductTolerance) + { + //they are not even parallel so move on + edgecount++; + continue; + } + + //is one of the points inside of the edge? 
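+ //Illustration (hypothetical numbers): for a guest edge from S = (0,0,0) to E = (10,0,0) and a collinear test endpoint
+ //P = (3,0,0), |P-E| + |P-S| - |S-E| = 7 + 3 - 10 = 0, which is below SurfacePLCoordTolerance, so P lies on the guest
+ //edge; for P = (12,0,0) the sum is 2 + 12 - 10 = 4 and the containment check fails.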
+ //test edge point 1 + double Ax = endpt.X - currentedge.startendpt[0].X; + double Ay = endpt.Y - currentedge.startendpt[0].Y; + double Az = endpt.Z - currentedge.startendpt[0].Z; + Vector.MemorySafe_CartVect A = new Vector.MemorySafe_CartVect(Ax, Ay, Az); + double Amag = Vector.VectorMagnitude(A); + + //TODO: Remove + //evX = endpt.X - startpt.X; + //evY = endpt.Y - startpt.Y; + //evZ = endpt.Z - startpt.Z; + double uniqueMag = Vector.VectorMagnitude(ev); + + double Bx = startpt.X - currentedge.startendpt[0].X; + double By = startpt.Y - currentedge.startendpt[0].Y; + double Bz = startpt.Z - currentedge.startendpt[0].Z; + Vector.MemorySafe_CartVect B = new Vector.MemorySafe_CartVect(Bx, By, Bz); + double Bmag = Vector.VectorMagnitude(B); + //check to see if the test edge's first point (index 0) is totally inside the guest edge + // start x ---------- 0 --------------x end + if (Amag + Bmag - uniqueMag < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //this is enough to prove that the two edges overlap, because we've already proven the two vectors are parallel + //then it is inside as well, and test vector is engulfed by guest vector + kp.Value.relatedEdges.Add(currentedge); + //but the edge is still itself unique + currentedge.relatedEdges.Add(kp.Value); + edgecount++; + continue; + } + //test edge point 2 + double Cx = endpt.X - currentedge.startendpt[1].X; + double Cy = endpt.Y - currentedge.startendpt[1].Y; + double Cz = endpt.Z - currentedge.startendpt[1].Z; + Vector.MemorySafe_CartVect C = new Vector.MemorySafe_CartVect(Cx, Cy, Cz); + double Cmag = Vector.VectorMagnitude(C); + + //we are at least parallel, now to check for a real intersection + double Dx = startpt.X - currentedge.startendpt[1].X; + double Dy = startpt.Y - currentedge.startendpt[1].Y; + double Dz = startpt.Z - currentedge.startendpt[1].Z; + Vector.MemorySafe_CartVect D = new Vector.MemorySafe_CartVect(Dx, Dy, Dz); + double Dmag = Vector.VectorMagnitude(D); + // start x ---------- 1 --------------x end + if (Cmag + Dmag - uniqueMag < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //this is enough to prove that the two edges overlap, because we've already proven the two vectors are parallel + //then it is inside as well, and test vector is engulfed by guest vector + kp.Value.relatedEdges.Add(currentedge); + //but the edge is still itself unique + currentedge.relatedEdges.Add(kp.Value); + edgecount++; + continue; + } + + //now check to see if the two points overlap the edge and contain it completely, this is also a valid condition + // 0 -------xstart------------------xend-------1 + double edgeMag = Vector.VectorMagnitude(edgev); + //use A,B + if(Amag + Bmag - edgeMag < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //this is enough to prove that the two edges overlap, because we've already proven the two vectors are parallel + //then it is inside as well, and test vector is engulfed by guest vector + kp.Value.relatedEdges.Add(currentedge); + //but the edge is still itself unique + currentedge.relatedEdges.Add(kp.Value); + edgecount++; + continue; + } + //use C,D + if (Cmag + Dmag - edgeMag < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //this is enough to prove that the two edges overlap, because we've already proven the two vectors are parallel + //then it is inside as well, and test vector is engulfed by guest vector + kp.Value.relatedEdges.Add(currentedge); + //but the edge is still itself unique + currentedge.relatedEdges.Add(kp.Value); + edgecount++; + continue; + } + + 
//no matches were found. log the issue and increase the edgecount nonetheless + //logger.Info("Edge (" + currentedge.startendpt[0].X + "," + currentedge.startendpt[0].Y + "," + currentedge.startendpt[0].Z + ")->(" + currentedge.startendpt[0].X + "," + currentedge.startendpt[0].Y + "," + currentedge.startendpt[0].Z + ") has not found a match."); + edgecount++; + } +#endregion + #region + //{ + // Vector.MemorySafe_CartCoord endpt = kp.Value.startendpt[1]; + // //are the two vectors even parallel? because if they are not, no need to get more complex + // double evX = endpt.X - startpt.X; + // double evY = endpt.Y - startpt.Y; + // double evZ = endpt.Z - startpt.Z; + // Vector.MemorySafe_CartVect ev = new Vector.MemorySafe_CartVect(evX, evY, evZ); + // double edgeX = currentedge.startendpt[1].X - currentedge.startendpt[0].X; + // double edgeY = currentedge.startendpt[1].Y - currentedge.startendpt[0].Y; + // double edgeZ = currentedge.startendpt[1].Z - currentedge.startendpt[0].Z; + // Vector.MemorySafe_CartVect edgev = new Vector.MemorySafe_CartVect(edgeX, edgeY, edgeZ); + // //TODO: add tolerance + // if (Vector.VectorMagnitude(Vector.CrossProduct(ev, edgev)) != 0) + // { + // //they are not even parallel so move on + // edgecount++; + // continue; + // } + + // //try to determine if the two edges are parallel + // //test edge point 1 + // double Ax = endpt.X - currentedge.startendpt[0].X; + // double Ay = endpt.Y - currentedge.startendpt[0].Y; + // double Az = endpt.Z - currentedge.startendpt[0].Z; + // Vector.MemorySafe_CartVect A = new Vector.MemorySafe_CartVect(Ax, Ay, Az); + // double Amag = Vector.VectorMagnitude(A); + + // //take cross product to see if they are even in same plane + // evX = endpt.X - startpt.X; + // evY = endpt.Y - startpt.Y; + // evZ = endpt.Z - startpt.Z; + // Vector.MemorySafe_CartVect ev1 = new Vector.MemorySafe_CartVect(evX, evY, evZ); + // double guestmag = Vector.VectorMagnitude(ev1); + // Vector.MemorySafe_CartVect cross1 = Vector.CrossProduct(A, ev1); + // double crossmag = Vector.VectorMagnitude(cross1); + // //tolerance? 
+ // if (crossmag == 0) + // { + // //we are at least parallel, now to check for a real intersection + // double Bx = startpt.X - currentedge.startendpt[0].X; + // double By = startpt.Y - currentedge.startendpt[0].Y; + // double Bz = startpt.Z - currentedge.startendpt[0].Z; + // Vector.MemorySafe_CartVect B = new Vector.MemorySafe_CartVect(Bx, By, Bz); + // double Bmag = Vector.VectorMagnitude(B); + // //check to see if the test edge's first point (index 0) is totally inside the guest edge + // if (Amag < guestmag && Bmag < guestmag) + // #region + // { + // //the start point of the test edge is inside the guest edge + // //test edge point 2 against guest edge point 2 + // double Cx = endpt.X - currentedge.startendpt[1].X; + // double Cy = endpt.Y - currentedge.startendpt[1].Y; + // double Cz = endpt.Z - currentedge.startendpt[1].Z; + // Vector.MemorySafe_CartVect C = new Vector.MemorySafe_CartVect(Cx, Cy, Cz); + // double Cmag = Vector.VectorMagnitude(C); + // Vector.MemorySafe_CartVect cross2 = Vector.CrossProduct(C, ev); + // crossmag = Vector.VectorMagnitude(cross2); + // if (crossmag == 0) + // { + // //we are at least parallel, in fact we have proven we are totall parallel, now intersect + // double Dx = startpt.X - currentedge.startendpt[1].X; + // double Dy = startpt.Y - currentedge.startendpt[1].Y; + // double Dz = startpt.Z - currentedge.startendpt[1].Z; + // Vector.MemorySafe_CartVect D = new Vector.MemorySafe_CartVect(Dx, Dy, Dz); + // double Dmag = Vector.VectorMagnitude(D); + // if (Cmag < guestmag && Dmag < guestmag) + // { + // //then it is inside as well, and test vector is engulfed by guest vector + // kp.Value.relatedEdges.Add(currentedge); + // //but the edge is still itself unique + // currentedge.relatedEdges.Add(kp.Value); + // edgecount++; + // continue; + // } + // else + // { + // //I am pretty sure that by default, they are still neighbors and this is no difference + // //it simply extends beyond one of the ends of the guest vector + // kp.Value.relatedEdges.Add(currentedge); + // //but the edge is still itself unique + // currentedge.relatedEdges.Add(kp.Value); + // edgecount++; + // continue; + // } + + + // } + // else + // { + // //we are not parallel, so this is not an adjacency match + // edgecount++; + // continue; + // } + // } + + // else + // { + // //if test edge start point [index 0] is outside, is one of the guest points inside? 
+ // //already computed B + // double Cx = startpt.X - currentedge.startendpt[1].X; + // double Cy = startpt.Y - currentedge.startendpt[1].Y; + // double Cz = startpt.Z - currentedge.startendpt[1].Z; + // Vector.MemorySafe_CartVect C = new Vector.MemorySafe_CartVect(Cx, Cy, Cz); + // double Cmag = Vector.VectorMagnitude(C); + + // edgeX = currentedge.startendpt[1].X - currentedge.startendpt[0].X; + // edgeY = currentedge.startendpt[1].Y - currentedge.startendpt[0].Y; + // edgeZ = currentedge.startendpt[1].Z - currentedge.startendpt[0].Z; + // Vector.MemorySafe_CartVect edgevec = new Vector.MemorySafe_CartVect(edgeX, edgeY, edgeZ); + // double edgemag = Vector.VectorMagnitude(edgevec); + + // if (Cmag < edgemag && Bmag < edgemag) + // { + // //the guest edge's start point is inside the test edge + // //guest edge point 2 + // double Dx = endpt.X - currentedge.startendpt[1].X; + // double Dy = endpt.Y - currentedge.startendpt[1].Y; + // double Dz = endpt.Z - currentedge.startendpt[1].Z; + // Vector.MemorySafe_CartVect D = new Vector.MemorySafe_CartVect(Dx, Dy, Dz); + // double Dmag = Vector.VectorMagnitude(D); + // Vector.MemorySafe_CartVect cross3 = Vector.CrossProduct(D, edgevec); + // crossmag = Vector.VectorMagnitude(cross3); + // if (crossmag == 0) + // { + // //then we know the two edges are totall parallel and lined up + // //determine if the guest edge point 2 is inside the test edge or outside of it + // double Ex = startpt.X - currentedge.startendpt[1].X; + // double Ey = startpt.Y - currentedge.startendpt[1].Y; + // double Ez = startpt.Z - currentedge.startendpt[1].Z; + // Vector.MemorySafe_CartVect E = new Vector.MemorySafe_CartVect(Ex, Ey, Ez); + // double Emag = Vector.VectorMagnitude(E); + // if (Dmag < edgemag && Emag < edgemag) + // { + // //it is inside + // kp.Value.relatedEdges.Add(currentedge); + // //but the edge is still itself unique + // currentedge.relatedEdges.Add(kp.Value); + // edgecount++; + // continue; + // } + // else + // { + // //it is outside + // kp.Value.relatedEdges.Add(currentedge); + // //but the edge is still itself unique + // currentedge.relatedEdges.Add(kp.Value); + // edgecount++; + // continue; + // } + // } + // else + // { + // //we are not parallel, so this is not an adjacency match + // edgecount++; + // continue; + // } + + // } + // } + // } + // else + // { + // //they are not even parallel, so it is likely best just to shove on + // edgecount++; + // continue; + // } + //} + #endregion + } + //this determines if it found the current edge to be unique, or not. + if (edgecount == uniqueedges.Count) + { + uniqueedges.Add(distinctedges, currentedge); + distinctedges++; + } + + } + } + return uniqueedges; + } + + private bool DoesSurfaceContainSurface(SurfaceDefinitions surface, SurfaceDefinitions testSurface, double testlengthConversion, double standardlengthConversion) + { + logger.Info("Starting to check if test surface " + testSurface.SurfaceId + " lies within surface " + surface.SurfaceId); + int coordcount = testSurface.PlCoords.Count(); + try + { + List> surfaceTriangles = new List>(); + if(IsSurfaceRegular(surface)) + { + //triangulate in a very simple way + List triangle1 = surface.PlCoords.GetRange(0, 3); + List triangle2 = surface.PlCoords.GetRange(2, 2); + triangle2.Add(surface.PlCoords[0]); //note this is a little hack to get back to the zero index. + surfaceTriangles.Add(triangle1); + surfaceTriangles.Add(triangle2); + } + else + { + //is the surface already a triangle? 
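+ //NOTE (illustrative only): whichever triangulation branch runs below, each test-surface vertex is then accepted when
+ //IsTestCoordMatchSurface reports an exact match or when the three angles it subtends to a triangle's corners sum to
+ //2*pi; a point strictly inside a triangle always subtends a full turn (the centroid of an equilateral triangle, for
+ //example, subtends three 120-degree angles), while a point outside does not.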
+ if(surface.PlCoords.Count == 3)
+ {
+ logger.Info("Surface " + surface.SurfaceId + " is a triangle.");
+ surfaceTriangles.Add(surface.PlCoords);
+ }
+ else
+ {
+ //is the surface still a quadrilateral?
+ if(surface.PlCoords.Count == 4)
+ {
+ //triangulate in a very simple way (as above)
+ List<Vector.MemorySafe_CartCoord> triangle1 = surface.PlCoords.GetRange(0, 3);
+ List<Vector.MemorySafe_CartCoord> triangle2 = surface.PlCoords.GetRange(2, 2);
+ triangle2.Add(surface.PlCoords[0]); //note this is a little hack to get back to the zero index.
+ surfaceTriangles.Add(triangle1);
+ surfaceTriangles.Add(triangle2);
+ }
+ else
+ {
+ //perform some advanced triangulation.
+ logger.Info("PROGRAMMER'S NOTE: NEW CODE PATH YET TO BE DEFINED : Advanced Triangulation");
+ }
+ 
+ 
+ }
+ }
+ foreach(Vector.MemorySafe_CartCoord coord in testSurface.PlCoords)
+ {
+ foreach(List<Vector.MemorySafe_CartCoord> triangle in surfaceTriangles)
+ {
+ if(IsTestCoordMatchSurface(coord, triangle, surface.SurfaceId, testlengthConversion, standardlengthConversion))
+ {
+ coordcount--;
+ logger.Info("Test surface coord "+ coord.X+","+coord.Y+","+coord.Z+" matches exactly.");
+ break;
+ }
+ else
+ {
+ //use the fact that the sum of the angles subtended at a point inside a triangle should be 2pi
+ Vector.CartVect p1_p = Vector.UnitVector(Vector.CreateVector(coord,triangle[0]));
+ Vector.CartVect p2_p = Vector.UnitVector(Vector.CreateVector(coord,triangle[1]));
+ Vector.CartVect p3_p = Vector.UnitVector(Vector.CreateVector(coord,triangle[2]));
+ 
+ double angle_a1 = Math.Acos(p1_p.X*p2_p.X + p1_p.Y*p2_p.Y + p1_p.Z*p2_p.Z);
+ double angle_a2 = Math.Acos(p2_p.X*p3_p.X + p2_p.Y*p3_p.Y + p2_p.Z*p3_p.Z);
+ double angle_a3 = Math.Acos(p3_p.X*p1_p.X + p3_p.Y*p1_p.Y + p3_p.Z*p1_p.Z);
+ if(Math.Abs(angle_a1+angle_a2+angle_a3 - 2*Math.PI)*180 / Math.PI < 0.01) //TODO: this tolerance is hardcoded and needs to be updated.
+ {
+ coordcount--;
+ logger.Info("Test surface coord "+ coord.X+","+coord.Y+","+coord.Z+" is inside of the standard surface.");
+ break;
+ }
+ }
+ }
+ 
+ }
+ if(coordcount == 0) return true;
+ else return false;
+ }
+ catch(Exception e)
+ {
+ logger.Error("Exception thrown in method DoesSurfaceContainSurface.");
+ return false;
+ }
+ }
+ 
+ 
+ private DOEgbXMLReportingObj GetPossibleSurfaceMatches(SurfaceDefinitions surface, List<SurfaceDefinitions> TestSurfaces, DOEgbXMLReportingObj report, Conversions.lengthUnitEnum standardLengthUnits, Conversions.lengthUnitEnum testLengthUnits, double testlengthConversion, double standardlengthConversion, Conversions.areaUnitEnum standardAreaUnits, Conversions.areaUnitEnum testAreaUnits, double testareaConversion, double standardareaConversion)
+ {
+ //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML
+ //added Mar 14 2013
+ report.testSummary = "This test tries to match each Surface element in the standard file with an equivalent in your test file.";
+ report.testSummary += " To be as flexible as possible about what constitutes a \"Good Match\", this test finds a pool of possible candidate ";
+ report.testSummary += "surfaces in your test file and then begins to eliminate them as they fail different tests.";
+ report.testSummary += " At the end, there should be only one surface candidate remaining that constitutes a good match. ";
+ report.testSummary += "You can see the result of this filtering process by reviewing the mini-report that is provided for you below.";
+ report.testSummary += "
"; + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added March 14 2013 + report.testSummary += " The search routine first tries to find all surfaces that have the same SurfaceType and adjacentSpaceIds."; + report.testSummary += " Everytime there is a match found in the test file, meeting these criteria, a message will appear in the "; + report.testSummary += "mini-report, indicating that a match has been found."; + report.testSummary += " There may be more than one match in your test file."; + report.testSummary += " If there are no matches found for SurfaceType and AdjacencyId, this message will be printed (and the test will end as failed):"; + report.testSummary += " In the test file, no matches could be found in the standard file that have the same AdjacentSpaceId(s) and SurfaceType."; + report.testSummary += "
"; + report.testSummary += " If this set of tests is successful, the routine next tries to remove those surfaces that do not meet"; + report.testSummary += " the tilt and azimuth tolerances. Let's pretend for example that the tilt and azimuth for the standard surface"; + report.testSummary += " in question are both 90 degrees. If the tilt and azimuth test tolerance are 1 degree, then the search"; + report.testSummary += " routine will only keep those walls that have 89<=tilt<=91 && <=89azimuth<=91 && match the SurfaceType and"; + report.testSummary += " adjacency relationships."; + report.testSummary += " The mini-report will let you know which surfaces pass the tilt and azimuth test and which do not."; + report.testSummary += "
"; + report.testSummary += " Next the search routine takes any of the remaining surface candidates that have passed all the tests so far, "; + report.testSummary += "and tries to determine if the Surface Areas defined by the polyLoops match to within a pre-defined % tolerance."; + report.testSummary += "
"; + report.testSummary += " the final tests are to physically test the coordinates of the polyloop and insertion point to make sure"; + report.testSummary += " that a match for the standard surface can be found."; + report.testSummary += " You should see additional messages telling you which surface in your test file matches, or doesn't match"; + report.testSummary += " the standard surface being searched against. If there is no match, the mini-report tells you."; + report.testSummary += " By making the tests this way, it is hoped that you can see exactly why your test file is failing against"; + report.testSummary += " the standard file's surface definitions."; + + try + { + report.MessageList.Add("Standard Surface Id: " + surface.SurfaceId); + report.MessageList.Add("
"); + //initialize the return list + //alternate between these two to filter out bad matches + List possiblesList1 = new List(); + List possiblesList2 = new List(); + + bool adjSpaceIdMatch = false; + bool isLowTiltObject = false; + bool isHighTiltObject = false; + bool interiorWallFlipped = false; + bool issurfaceRegular = false; + bool istestSurfaceRegular = false; + //try to find a surface in the test file that has the same: + //adjacent space Id signature + //surfaceType + //free list is 1 + //list 2 is not used + for(int ts = 0; ts"); + possiblesList1.Add(testSurface); + } + } + else + { + if(IsLowTiltSurface(surface)) isLowTiltObject = true; + if (IsHighTiltSurface(surface)) isHighTiltObject = true; + if(surface.SurfaceType == testSurface.SurfaceType) + { + report.MessageList.Add("AdjancentSpaceId(s) and surfaceType Match."); + report.MessageList.Add("Surface id: " + testSurface.SurfaceId + " is a candidate."); + report.MessageList.Add("
"); + possiblesList1.Add(testSurface); + } + } + + } + } + } + if (possiblesList1.Count == 1) + { + report.MessageList.Add("Based on a comparison of the surface Type and Adjacent SpaceIds, there is " + possiblesList1.Count.ToString() + " surface in the test file that is a possible match for " + surface.SurfaceId + " of the Standard File."); + report.MessageList.Add("
"); + } + else if (possiblesList1.Count > 1) + { + report.MessageList.Add("Based on a comparison of the surface Type and Adjacent SpaceIds, there are " + possiblesList1.Count.ToString() + " surface in the test file that are possible matches for " + surface.SurfaceId + " of the Standard File."); + report.MessageList.Add("
"); + } + else + { + report.longMsg = "In the test file, no matches could be found in the standard file that have the same AdjacentSpaceId(s) and SurfaceType."; + report.passOrFail = false; + return report; + } + //begin to filter back this list + //tilt + //azimuth + //list 1 is analyzed + //list 2 is free + + if (possiblesList1.Count > 0) + { + foreach (SurfaceDefinitions testSurface in possiblesList1) + { + double tiltDifference = 0; + double azimuthDifference = Math.Abs(testSurface.Azimuth - surface.Azimuth); + if(isLowTiltObject) + { + if(IsLowTiltSurface(testSurface)) //they are the same, both have small tils + { + tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt); + } + else //they are 180 degrees different, and the test surface is a high tilt while the standard is low tilt + { + if (testSurface.SurfaceType == "InteriorFloor") + { + tiltDifference = Math.Abs(Math.Abs(testSurface.Tilt - 180) - surface.Tilt); + } + else + { + report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); + report.MessageList.Add("
"); + continue; + } + } + + //no azimuth tests + if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance) //azimuth no longer matters for these surfaces + { + if(surface.Tilt != 0) + { + if(azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance) + { + report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); + report.MessageList.Add("
"); + continue; + } + } + else + { + report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); + report.MessageList.Add("
"); + continue; + } + + } + //if the tilt and azimuth is within tolerance + else + { + //add to the free List + if (surface.Tilt == 0) + { + possiblesList2.Add(testSurface); + if (tiltDifference == 0) + { + report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " matches the standard surface tilt and azimuth exactly."); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); + + report.MessageList.Add("
"); + } + else + { + report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " is within the azimuth and tilt tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance + " and " + DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance + ", respectively. It matches the standard file surface within the allowable tolerance."); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); + + report.MessageList.Add("
"); + } + } + else + { + //check the azimuth + if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance || azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance) + { + report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); + report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); + report.MessageList.Add("
"); + continue; + } + //if the tilt and azimuth is within tolerance + else + { + //add to the free List + possiblesList2.Add(testSurface); + if (tiltDifference == 0 && azimuthDifference == 0) + { + report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " matches the standard surface tilt and azimuth exactly."); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); + + report.MessageList.Add("
"); + } + else + { + report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " is within the azimuth and tilt tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance + " and " + DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance + ", respectively. It matches the standard file surface within the allowable tolerance."); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); + + report.MessageList.Add("
"); + } + } + } + } + } + else if (isHighTiltObject) + { + if(IsHighTiltSurface(testSurface)) //both high tilt interior surfaces + { + tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt); + } + else //standard is high tilt, test is low tilt + { + if(testSurface.SurfaceType == "Ceiling") + { + tiltDifference = Math.Abs(Math.Abs(testSurface.Tilt - 180) - surface.Tilt); + } + else + { + report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); + report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); + report.MessageList.Add("
"); + continue; + } + } + + //no azimuth tests + if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance) //azimuth no longer matters for these surfaces + { + report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); + report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); + report.MessageList.Add("
"); + continue; + } + //if the tilt and azimuth is within tolerance + else + { + //add to the free List + possiblesList2.Add(testSurface); + if (tiltDifference == 0) + { + report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " matches the standard surface tilt and azimuth exactly."); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); + + report.MessageList.Add("
"); + } + else + { + report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " is within the azimuth and tilt tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance + " and " + DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance + ", respectively. It matches the standard file surface within the allowable tolerance."); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); + + report.MessageList.Add("
"); + } + } + } + else + { + azimuthDifference = Math.Abs(testSurface.Azimuth - surface.Azimuth); + if (interiorWallFlipped) //both high tilt interior surfaces + { + + azimuthDifference = Math.Abs(Math.Abs(testSurface.Azimuth - surface.Azimuth) - 180); //180 is needed because they should be separated by 180 + } + tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt); + + //if the tilt and azimuth is outside of tolerance + if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance || azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance) + { + report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); + report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); + report.MessageList.Add("
"); + continue; + } + //if the tilt and azimuth is within tolerance + else + { + //add to the free List + possiblesList2.Add(testSurface); + if (tiltDifference == 0 && azimuthDifference == 0) + { + report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " matches the standard surface tilt and azimuth exactly."); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); + + report.MessageList.Add("
"); + } + else + { + report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " is within the azimuth and tilt tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance + " and " + DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance + ", respectively. It matches the standard file surface within the allowable tolerance."); + report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); + report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); + + report.MessageList.Add("
"); + } + } + } + + } + } + // report to the user that no matches could be found + else + { + report.longMsg = "In the test file, surfaces could be found that match the standard file's AdjacentSpaceId and SurfaceType, but of these matches, none could be identified that also have a tilt or azimuth that exactly matches the standard file's, or is within the allowable tolerance."; + report.passOrFail = false; + return report; + } + + //clear the first list + possiblesList1.Clear(); + //start to loop through the new refined list + //generally want to look at the polyLoop coordinates + //list 2 is analyzed + //list 1 is free + report.MessageList.Add("Starting Surface Area Match tests......"); + report.MessageList.Add("
"); + if (possiblesList2.Count > 0) + { + //simple method from this point forward is just to simply start doing a polyloop check + //check the standard surface PolyLoop and the test Surface(s) polyloop(s) + //check the absolute coordinates of the testSurface(s) polyloop(s) + + if (possiblesList2.Count == 1) + { + report.MessageList.Add("Only one Surface Candidate remaining from the original test pool."); + report.MessageList.Add("
"); + //meaning there is only one candidate still available + //go on to test the polyLoop coordinates and the insertion point + possiblesList1.Add(possiblesList2[0]); + + + } + //more than one candidate still exists even after the adjacency test, surfaceType test, and tilt and azimuth tests, so filter through + else + { + //The user should be able to determine, based on output which surfaces are left for consideration + //Option 1: (easiest) find the one best candidate + //do so based on an area match, matching the area of the test surface with the area of the test surface + //(without regard for absolute polyloop coordinates) + + //We find the area using area formulas for both regular polygons and irregular polygons + //first we check for the type of surface that it is (regular polygon or not), and we then take it from there + //in the case of a rectangular polygon, we only count rectangles or squares as regular, everything else is + //assumed to be irregular, though this does not fit the classic definition of a classic polygon. + //The language is just semantics + + //first try to find if the standard file has a regular rectangular or square profile + report.MessageList.Add("Checking if the surface is a square or rectangle."); + issurfaceRegular = IsSurfaceRegular(surface); + foreach (SurfaceDefinitions regSurface in possiblesList2) + { + //ensures likewise that all the test surface candidates are regular, + //TODO: if they are not, then the entire set is assumed to be irregular (this could be improved) + istestSurfaceRegular = IsSurfaceRegular(regSurface); + if (istestSurfaceRegular == false) break; + } + if (issurfaceRegular && istestSurfaceRegular) + { + //we take a shortcut and use the width and height as a way to simplify the area checking scheme + //we assume that the width and height are properly entered in this simplified case + report.MessageList.Add("Rectangle or Square = TRUE"); + report.MessageList.Add("Comparisons of the Width and Height values will be used as a proxy for surface Area."); + foreach (SurfaceDefinitions testsurface in possiblesList2) + { + //it first analyzes the test file to see if slivers are present. If they are, it will fail the test + //if slivers are not allowed for the test. This is the first time we check for slivers + //TODO: consider removing or giving a feature to allow this to be overridded. + if (testsurface.Width <= DOEgbXMLBasics.Tolerances.SliverDimensionTolerance || testsurface.Height <= DOEgbXMLBasics.Tolerances.SliverDimensionTolerance) + { + if (!DOEgbXMLBasics.SliversAllowed) + { + report.MessageList.Add("This test does not allow slivers less than " + DOEgbXMLBasics.Tolerances.SliverDimensionTolerance + " ft. A sliver has been detected. 
Test surface id: " + testsurface.SurfaceId + " is a sliver."); + report.passOrFail = false; + return report; + } + } + //otherwise, if the sliver test passes + double widthDiff = Math.Abs((testlengthConversion * testsurface.Width) - surface.Width * standardlengthConversion); + if(widthDiff > DOEgbXMLBasics.Tolerances.SurfaceWidthTolerance) + { + widthDiff = Math.Abs((testlengthConversion * testsurface.Height) - surface.Width * standardlengthConversion); + if(widthDiff < DOEgbXMLBasics.Tolerances.SurfaceWidthTolerance) + { + //we will swap them + double heightDiff = Math.Abs((testlengthConversion * testsurface.Width) - surface.Height * standardlengthConversion); + if (heightDiff > DOEgbXMLBasics.Tolerances.SurfaceHeightTolerance) + { + report.MessageList.Add("Test file's Surface id: " + testsurface.SurfaceId + " width and height do not both match the standard file surface id: " + surface.SurfaceId + ". This surface has been removed as a candidate."); + continue; + } + else + { + //this surface is a candidate + possiblesList1.Add(testsurface); + if (widthDiff == 0 && heightDiff == 0) + { + report.MessageList.Add("Test file surface with id: " + testsurface.SurfaceId + " have has the width and height swapped, but the width and height exactly match the standard file."); + //go ahead and now check the polyLoop coordinates, and then the insertion point + } + else + { + report.MessageList.Add("Test file surface with id: " + testsurface.SurfaceId + " have been swapped, but are within the width and height tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceWidthTolerance + standardLengthUnits + " and " + DOEgbXMLBasics.Tolerances.SurfaceHeightTolerance + standardLengthUnits + ", respectively."); + //go ahead and now check the polyloop coordinates, and then the insertion point + } + } + } + } + else + { + //we won't swap them + double heightDiff = Math.Abs((testlengthConversion * testsurface.Height) - surface.Height * standardlengthConversion); + if (widthDiff > DOEgbXMLBasics.Tolerances.SurfaceWidthTolerance || + heightDiff > DOEgbXMLBasics.Tolerances.SurfaceHeightTolerance) + { + report.MessageList.Add("Test file's Surface id: " + testsurface.SurfaceId + " width and height do not both match the standard file surface id: " + surface.SurfaceId + ". This surface has been removed as a candidate."); + continue; + } + else + { + //this surface is a candidate + possiblesList1.Add(testsurface); + if (widthDiff == 0 && heightDiff == 0) + { + report.MessageList.Add("Test file surface with id: " + testsurface.SurfaceId + " matches the width and height exactly of the standard file."); + //go ahead and now check the polyLoop coordinates, and then the insertion point + } + else + { + report.MessageList.Add("Test file surface with id: " + testsurface.SurfaceId + " is within the width and height tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceWidthTolerance + standardLengthUnits + " and " + DOEgbXMLBasics.Tolerances.SurfaceHeightTolerance + standardLengthUnits + ", respectively."); + //go ahead and now check the polyloop coordinates, and then the insertion point + } + } + } + } + } + //It is not "regular". Find the one surface with the area that most closely matches, and then check its polyloops + //1. get the polyloop area of the standard file's surface polyloops + //2. 
get the area of the test file surface candidates using the polyloop coordinates + else + { + report.MessageList.Add("The surface is not a square or rectangle."); + report.MessageList.Add("PolyLoop coordinates will be used to calculate the area."); + //there are two basic cases, one where we get the area using greens theorem when the surface is parallel + //to one of the axes of the project global reference frame + //and the second where the surface is not parallel to one of the axes of the global reference frame + //Surface normal Parallel to global reference frame X Axis + if (Math.Abs(surface.PlRHRVector.X) == 1 && surface.PlRHRVector.Y == 0 && surface.PlRHRVector.Z == 0) + { + List coordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in surface.PlCoords) + { + //only take the Y and Z coordinates and throw out the X because we can assume that they are all the same + //create new + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(0, coord.Y, coord.Z); + coordList.Add(c2); + + } + double area = GetAreaFrom2DPolyLoop(coordList); + if (area == -999) + { + report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); + + } + double testSurfacesArea = 0; + + foreach (SurfaceDefinitions testSurface in possiblesList2) + { + if (Math.Abs(testSurface.PlRHRVector.X) == 1 && testSurface.PlRHRVector.Y == 0 && + testSurface.PlRHRVector.Z == 0) + { + List testCoordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in testSurface.PlCoords) + { + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(0, coord.Y, coord.Z); + testCoordList.Add(c2); + } + testSurfacesArea = GetAreaFrom2DPolyLoop(testCoordList); + if (testSurfacesArea == -999) + { + report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + } + //convert from the test units to the standard units + + double difference = Math.Abs((area*standardareaConversion) - (testSurfacesArea * testareaConversion)); + if (difference < area * DOEgbXMLBasics.Tolerances.SurfaceAreaPercentageTolerance) + { + possiblesList1.Add(testSurface); + if (difference == 0) + { + //then it perfectly matches, go on to check the poly loop coordinates + //then check the insertion point + report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " exactly."); + } + else + { + report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " within the allowable area percentage tolerance."); + } + } + else + { + report.MessageList.Add("The test surface cannot find a match for its surface area as defined in the polyLoop coordinates"); + //don't return here, it will be returned below + } + } + else + { + //do nothing, it will be handled by the more general case and then translated to a 2-D surface + } + } + + + } + //Surface normal Parallel to global reference frame y Axis + else if (surface.PlRHRVector.X == 0 && Math.Abs(surface.PlRHRVector.Y) == 1 && surface.PlRHRVector.Z == 0) + { + List coordList = new List(); + foreach (Vector.MemorySafe_CartCoord 
coord in surface.PlCoords) + { + //only take the X and Z coordinates and throw out the Y because we can assume that they are all the same + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, 0, coord.Z); + coordList.Add(c2); + + } + double area = GetAreaFrom2DPolyLoop(coordList); + if (area == -999) + { + report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); + + } + double testSurfacesArea = 0; + + foreach (SurfaceDefinitions testSurface in possiblesList2) + { + if (Math.Abs(testSurface.PlRHRVector.X) == 0 && Math.Abs(testSurface.PlRHRVector.Y) == 1 && testSurface.PlRHRVector.Z == 0) + { + List testCoordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in testSurface.PlCoords) + { + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, 0, coord.Z); + testCoordList.Add(c2); + } + testSurfacesArea = GetAreaFrom2DPolyLoop(testCoordList); + if (testSurfacesArea == -999) + { + report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + } + //convert the testSurfaceArea + double difference = Math.Abs((area*standardareaConversion) - (testSurfacesArea * testareaConversion)); + if (difference < area * DOEgbXMLBasics.Tolerances.SurfaceAreaPercentageTolerance) + { + possiblesList1.Add(testSurface); + if (difference == 0) + { + //then it perfectly matches, go on to check the poly loop coordinates + //then check the insertion point + report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " exactly."); + } + else + { + report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " within the allowable area percentage tolerance."); + } + } + else + { + report.MessageList.Add("The test surface cannot find a match for its surface area as defined in the polyLoop coordinates"); + //don't return here, it will be returned below + } + } + else + { + //do nothing, it will be handled by the more general code below and translated to 2D + } + } + } + else if (surface.PlRHRVector.X == 0 && surface.PlRHRVector.Y == 0 && Math.Abs(surface.PlRHRVector.Z) == 1) + { + List coordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in surface.PlCoords) + { + //only take the X and Y coordinates and throw out the Z because we can assume that they are all the same + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, coord.Y, 0); + coordList.Add(c2); + + } + double area = GetAreaFrom2DPolyLoop(coordList); + if (area == -999) + { + report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); + + } + double testSurfacesArea = 0; + + foreach (SurfaceDefinitions testSurface in possiblesList2) + { + if (Math.Abs(testSurface.PlRHRVector.X) == 0 && testSurface.PlRHRVector.Y == 0 && Math.Abs(testSurface.PlRHRVector.Z) == 1) + { + List testCoordList = 
new List(); + foreach (Vector.MemorySafe_CartCoord coord in testSurface.PlCoords) + { + + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, coord.Y, 0); + testCoordList.Add(c2); + } + testSurfacesArea = GetAreaFrom2DPolyLoop(testCoordList); + if (testSurfacesArea == -999) + { + report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + } + //provide area conversion + double difference = Math.Abs((area*standardareaConversion) - (testSurfacesArea * testareaConversion)); + if (difference < area * DOEgbXMLBasics.Tolerances.SurfaceAreaPercentageTolerance) + { + possiblesList1.Add(testSurface); + if (difference == 0) + { + //then it perfectly matches, go on to check the poly loop coordinates + //then check the insertion point + report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " exactly."); + } + else + { + report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " within the allowable area percentage tolerance."); + } + } + else + { + report.MessageList.Add("The test surface cannot find a match for its surface area as defined in the polyLoop coordinates"); + //don't return here, it will be returned below + } + } + else + { + //do nothing. The code below will handle the more general case where it is not aligned with reference frame axes + } + } + } + //the surface is not aligned with one of the reference frame axes, which requires a bit more work to determine the right answer. 
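+ //NOTE (illustrative sketch, not part of the original source): the three axis-aligned branches above and the
+ //general branch below all rely on GetAreaFrom2DPolyLoop, which is defined elsewhere in this project. Assuming
+ //it applies the shoelace (Green's theorem) formula to a planar polyloop, a hypothetical equivalent could use a
+ //cross-product form of the formula, which works whichever coordinate axis was zeroed out during projection:
+ //
+ //    private static double AreaOfPlanarPolyLoop(List<Vector.MemorySafe_CartCoord> pts)
+ //    {
+ //        if (pts.Count < 3) return -999;   //mirrors the -999 "could not compute" convention used above
+ //        //sum the cross products of consecutive position vectors; half the magnitude is the polygon area
+ //        double cx = 0, cy = 0, cz = 0;
+ //        for (int i = 0; i < pts.Count; i++)
+ //        {
+ //            Vector.MemorySafe_CartCoord p = pts[i];
+ //            Vector.MemorySafe_CartCoord q = pts[(i + 1) % pts.Count];
+ //            cx += (p.Y * q.Z) - (p.Z * q.Y);
+ //            cy += (p.Z * q.X) - (p.X * q.Z);
+ //            cz += (p.X * q.Y) - (p.Y * q.X);
+ //        }
+ //        return Math.Sqrt(cx * cx + cy * cy + cz * cz) / 2.0;
+ //    }
+ //
+ //The name AreaOfPlanarPolyLoop and its signature are assumptions for illustration only; the project's actual
+ //GetAreaFrom2DPolyLoop implementation may differ.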
+ else + { + report.MessageList.Add("The standard surface is not aligned along an axis, and will be rotated into a new coordinate frame"); + //New Z Axis for this plane is the normal vector, does not need to be created + //Get New Y Axis which is the surface Normal Vector cross the original global reference X unit vector (all unit vectors please + Vector.CartVect globalReferenceX = new Vector.CartVect(); + globalReferenceX.X = 1; + globalReferenceX.Y = 0; + globalReferenceX.Z = 0; + Vector.MemorySafe_CartVect localY = Vector.UnitVector(Vector.CrossProductMSRetMSNV(surface.PlRHRVector, globalReferenceX)); + + + //new X axis is the localY cross the surface normal vector + Vector.MemorySafe_CartVect localX = Vector.UnitVector(Vector.CrossProduct(localY, surface.PlRHRVector)); + + //convert the polyloop coordinates to a local 2-D reference frame + //using a trick employed by video game programmers found here http://stackoverflow.com/questions/1023948/rotate-normal-vector-onto-axis-plane + List translatedCoordinates = new List(); + Vector.MemorySafe_CartCoord newOrigin = new Vector.MemorySafe_CartCoord(0,0,0); + translatedCoordinates.Add(newOrigin); + for (int j = 1; j < surface.PlCoords.Count; j++) + { + //randomly assigns the first polyLoop coordinate as the origin + Vector.MemorySafe_CartCoord origin = surface.PlCoords[0]; + //captures the components of a vector drawn from the new origin to the + Vector.CartVect distance = new Vector.CartVect(); + distance.X = surface.PlCoords[j].X - origin.X; + distance.Y = surface.PlCoords[j].Y - origin.Y; + distance.Z = surface.PlCoords[j].Z - origin.Z; + + //x coordinate is distance vector dot the new local X axis + double tX = distance.X * localX.X + distance.Y * localX.Y + distance.Z * localX.Z; + //y coordinate is distance vector dot the new local Y axis + double tY = distance.X * localY.X + distance.Y * localY.Y + distance.Z * localY.Z; + double tZ = 0; + Vector.MemorySafe_CartCoord translatedPt = new Vector.MemorySafe_CartCoord(tX,tY,tZ); + translatedCoordinates.Add(translatedPt); + + } + double area = GetAreaFrom2DPolyLoop(translatedCoordinates); + if (area == -999) + { + report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); + + } + //get the area of the test candidates using the polyloop coordinates + foreach (SurfaceDefinitions testSurface in possiblesList2) + { + Vector.CartVect testglobalReferenceX = new Vector.CartVect(); + globalReferenceX.X = 1; + globalReferenceX.Y = 0; + globalReferenceX.Z = 0; + Vector.MemorySafe_CartVect testlocalY = Vector.UnitVector(Vector.CrossProductMSRetMSNV(surface.PlRHRVector, testglobalReferenceX)); + + //new X axis is the localY cross the surface normal vector + Vector.MemorySafe_CartVect testlocalX = Vector.UnitVector(Vector.CrossProduct(testlocalY, surface.PlRHRVector)); + + //convert the polyloop coordinates to a local 2-D reference frame + //using a trick employed by video game programmers found here http://stackoverflow.com/questions/1023948/rotate-normal-vector-onto-axis-plane + List testtranslatedCoordinates = new List(); + Vector.MemorySafe_CartCoord newOriginTest = new Vector.MemorySafe_CartCoord(0,0,0); + testtranslatedCoordinates.Add(newOriginTest); + for (int j = 1; j < surface.PlCoords.Count; j++) + { + //randomly assigns the first polyLoop coordinate as the origin + 
Vector.MemorySafe_CartCoord origin = testSurface.PlCoords[0]; + //captures the components of a vector drawn from the new origin to the + Vector.CartVect distance = new Vector.CartVect(); + distance.X = testSurface.PlCoords[j].X - origin.X; + distance.Y = testSurface.PlCoords[j].Y - origin.Y; + distance.Z = testSurface.PlCoords[j].Z - origin.Z; + + //x coordinate is distance vector dot the new local X axis + double tX = distance.X * localX.X + distance.Y * localX.Y + distance.Z * localX.Z; + //y coordinate is distance vector dot the new local Y axis + double tY = distance.X * localY.X + distance.Y * localY.Y + distance.Z * localY.Z; + double tZ = 0; + Vector.MemorySafe_CartCoord translatedPt = new Vector.MemorySafe_CartCoord(tX,tY,tZ); + testtranslatedCoordinates.Add(translatedPt); + + } + double testarea = GetAreaFrom2DPolyLoop(translatedCoordinates); + if (testarea == -999) + { + report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); + report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); + } + //convert to the standard units + double difference = Math.Abs((area*standardareaConversion) - (testarea * testareaConversion)); + if (difference < area * DOEgbXMLBasics.Tolerances.SurfaceAreaPercentageTolerance) + { + possiblesList1.Add(testSurface); + //within reason + if (difference == 0) + { + report.MessageList.Add + ("The test surface: " + testSurface.SurfaceId + + " polyloop surface area matches the polyLoop surface area of the standard surface: " + + surface.SurfaceId + " exactly."); + } + else + { + report.MessageList.Add + ("The test surface: " + testSurface.SurfaceId + + " polyloop surface area matches the polyLoop surface area of the standard surface: " + + surface.SurfaceId + " within the allowable area percentage tolerance."); + } + } + else + { + //not within reason, so the test will fail + //don't return yet, it will be returned below when possiblesList1 is found empty + } + } + } + } + } + + possiblesList2.Clear(); + //polyLoop absolute coordinates + //list 1 is analyzed + //list 2 is free + report.MessageList.Add("
"); + report.MessageList.Add("Starting PolyLoop coordinate comparisons......."); + report.MessageList.Add("
"); + if (possiblesList1.Count > 0) + { + + foreach (SurfaceDefinitions testSurface in possiblesList1) + { + //check the polyLoop coordinates + foreach (Vector.MemorySafe_CartCoord standardPolyLoopCoord in surface.PlCoords) + { + report = GetPolyLoopCoordMatch(standardPolyLoopCoord, testSurface, report, surface.SurfaceId, testlengthConversion, standardlengthConversion); + if (report.passOrFail) + { + continue; + } + else + { + report.MessageList.Add("Could not find a coordinate match in the test surface polyloop."); + break; + } + } + if (report.passOrFail) + { + possiblesList2.Add(testSurface); + } + } + } + else + { + report.longMsg = "In the test file, no surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, and Surface Area. Failed when attempting to match the surface area."; + report.passOrFail = false; + return report; + } + possiblesList1.Clear(); + report.MessageList.Add("
"); + if(!isHighTiltObject && !isLowTiltObject && issurfaceRegular) //no point in doing this if thing is not square and regular + { + report.MessageList.Add("Starting Insertion Point Coordinate comparisons......."); + report.MessageList.Add("
"); + if (possiblesList2.Count > 0) + { + //check the insertion point coordinate + foreach (SurfaceDefinitions testSurface in possiblesList2) + { + //now match the differences + double insPtXDiff = Math.Abs((testSurface.InsertionPoint.X * testlengthConversion) - (surface.InsertionPoint.X*standardlengthConversion)); + double insPtYDiff = Math.Abs((testSurface.InsertionPoint.Y * testlengthConversion) - (surface.InsertionPoint.Y*standardlengthConversion)); + double insPtZDiff = Math.Abs((testSurface.InsertionPoint.Z * testlengthConversion) - (surface.InsertionPoint.Z*standardlengthConversion)); + if(interiorWallFlipped) + { + report.MessageList.Add("The azimuths are flipped. Looking to see if the test surface has properly defined the insertion point it has."); + report.MessageList.Add("
"); + //find the complimenting insertion point + for(int pt = 0; pt DOEgbXMLBasics.Tolerances.SurfaceInsPtXTolerance || insPtYDiff > DOEgbXMLBasics.Tolerances.SurfaceInsPtYTolerance || insPtZDiff > DOEgbXMLBasics.Tolerances.SurfaceInsPtZTolerance) + { + report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " insertion point coordinates do not both match the standard file surface id: " + surface.SurfaceId + ". It has been removed as a candidate."); + continue; + } + else + { + //possible match + possiblesList1.Add(testSurface); + if (insPtXDiff == 0 && insPtYDiff == 0 && insPtZDiff == 0) + { + //perfect match + report.MessageList.Add("Test file's Surface with id: " + testSurface.SurfaceId + " matches the insertion point in the standard file exactly."); + } + else + { + //perfect match + report.MessageList.Add(" Test file's Surface with id: " + testSurface.SurfaceId + " has an insertion point that is within the allowable tolerances of X:" + DOEgbXMLBasics.Tolerances.SurfaceInsPtXTolerance + " ft, Y:" + DOEgbXMLBasics.Tolerances.SurfaceInsPtYTolerance + "ft, Z:" + DOEgbXMLBasics.Tolerances.SurfaceInsPtZTolerance + "ft."); + } + } + + } + } + else + { + report.longMsg = "In the test file, no surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, and PolyLoop Coordinates. Failed when matching PolyLoop coordinates."; + report.passOrFail = false; + return report; + } + if (possiblesList1.Count == 1) + { + report.longMsg = "Advanced Surface Test found a match for Standard file surface id: " + surface.SurfaceId + " in the test file. Only one match was found to be within all the tolerances allowed. Surface id: " + possiblesList2[0].SurfaceId + "."; + report.passOrFail = true; + List testFileSurfIds = new List(); + foreach (SurfaceDefinitions surf in possiblesList1) { testFileSurfIds.Add(surf.SurfaceId); } + + globalMatchObject.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds); + return report; + } + else if (possiblesList1.Count == 0) + { + report.longMsg = "In the test file, no surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, PolyLoop Coordinates, and Insertion Point. Failed when attempting to match the insertion point coordinates."; + report.passOrFail = false; + return report; + } + else if (possiblesList1.Count > 1) + { + report.longMsg = "Advanced Surface Test found more than one match for Standard file surface id: " + surface.SurfaceId + " in the test file. It was not possible to determine only one unique surface."; + report.passOrFail = false; + //List testFileSurfIds = new List(); + //foreach (SurfaceDefinitions surf in possiblesList1) { testFileSurfIds.Add(surf.SurfaceId); } + //report.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds); + return report; + } + return report; + } + else + { + //we do not conduct insertion point tests for horizontal surfaces + if (possiblesList2.Count == 1) + { + report.longMsg = "Advanced Surface Test found a match for Standard file surface id: " + surface.SurfaceId + " in the test file. Only one match was found to be within all the tolerances allowed. 
Surface id: " + possiblesList2[0].SurfaceId + "."; + report.passOrFail = true; + List testFileSurfIds = new List(); + foreach (SurfaceDefinitions surf in possiblesList2) { testFileSurfIds.Add(surf.SurfaceId); } + + globalMatchObject.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds); + return report; + } + else if (possiblesList2.Count == 0) + { + report.longMsg = "In the test file, no surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, PolyLoop Coordinates, and Insertion Point. Failed when attempting to match the insertion point coordinates."; + report.passOrFail = false; + return report; + } + else if (possiblesList2.Count > 1) + { + report.longMsg = "Advanced Surface Test found more than one match for Standard file surface id: " + surface.SurfaceId + " in the test file. It was not possible to determine only one unique surface."; + report.passOrFail = false; + //List testFileSurfIds = new List(); + //foreach (SurfaceDefinitions surf in possiblesList1) { testFileSurfIds.Add(surf.SurfaceId); } + //report.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds); + return report; + } + return report; + } + } + return report; + + } + catch (Exception e) + { + report.longMsg = (e.ToString()); + return report; + } + } + + //designed to be passed a pre-vetted surface definition from a Standard File + private static bool IsLowTiltSurface(SurfaceDefinitions surface) + { + bool isLowTilt = false; + if (surface.SurfaceType == "Ceiling" || surface.SurfaceType == "Roof" || surface.SurfaceType == "UndergroundCeiling") + { + isLowTilt = true; + } + return isLowTilt; + } + + private static bool IsHighTiltSurface(SurfaceDefinitions surface) + { + bool isHighTilt = false; + if (surface.SurfaceType == "InteriorFloor" || surface.SurfaceType == "RaisedFloor" || surface.SurfaceType == "SlabOnGrade" || surface.SurfaceType == "UndergroundSlab" || surface.SurfaceType == "ExposedFloor") isHighTilt = true; + return isHighTilt; + } + + //method to determine if test coord matches the surface's coordinates within a given tolerance + private static bool IsTestCoordMatchSurface(Vector.MemorySafe_CartCoord testCoord, List surfaceCoords, string standardSurfaceId, double testlengthConversion, double standardlengthConversion) + { + List possibleMatch = new List(); + List exactMatch = new List(); + logger.Info("Testing Polyloop coordinates for Standard surface " + standardSurfaceId); + logger.Info(" X: " + testCoord.X.ToString() + ", Y: " + testCoord.Y.ToString() + ", Z: " + testCoord.Z.ToString()); + foreach (Vector.MemorySafe_CartCoord PolyLoopCoord in surfaceCoords) + { + + //find an appropriate match + double diffX = Math.Abs((PolyLoopCoord.X * standardlengthConversion) - (testCoord.X * testlengthConversion)); + if (diffX < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //found a perfect X Match + if (diffX == 0) + { + //test Y + double diffY = Math.Abs((PolyLoopCoord.Y * standardlengthConversion) - (testCoord.Y * testlengthConversion)); + if (diffY < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Y Match + if (diffY == 0) + { + double diffZ = Math.Abs((PolyLoopCoord.Z * standardlengthConversion) - (testCoord.Z * testlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + logger.Info("Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " exactly"); + logger.Info(" X: " + 
PolyLoopCoord.X.ToString() + ", Y: " + PolyLoopCoord.Y.ToString() + ", Z: " + PolyLoopCoord.Z.ToString()); + exactMatch.Add(PolyLoopCoord); + } + else + { + //not a perfect Z match but within bounds + logger.Info("Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " X and Y coordinates exactly. Z coordinate within allowable tolerance."); + logger.Info(" X: " + PolyLoopCoord.X.ToString() + ", Y: " + PolyLoopCoord.Y.ToString() + ", Z: " + PolyLoopCoord.Z.ToString()); + possibleMatch.Add(PolyLoopCoord); + } + } + else + { + //z coordinate not within tolerance + continue; + } + } + //Y Match is within the allowable tolerance + else + { + double diffZ = Math.Abs((PolyLoopCoord.Z * standardlengthConversion) - (testCoord.Z * testlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + logger.Info("Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " in the X and Z coordinates, exactly. Y coordinate is within tolerance."); + logger.Info(" X: " + PolyLoopCoord.X.ToString() + ", Y: " + PolyLoopCoord.Y.ToString() + ", Z: " + PolyLoopCoord.Z.ToString()); + possibleMatch.Add(PolyLoopCoord); + } + else + { + logger.Info("Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " X exactly. Y and Z coordinates are within tolerance."); + logger.Info(" X: " + PolyLoopCoord.X.ToString() + ", Y: " + PolyLoopCoord.Y.ToString() + ", Z: " + PolyLoopCoord.Z.ToString()); + possibleMatch.Add(PolyLoopCoord); + } + } + else + { + //z coordinate is not within tolerance + continue; + } + } + } + else + { + //a y match could not be found within tolerance + continue; + } + + } + else + { + //not a perfect X match, but within tolerance + //test Y + double diffY = Math.Abs((PolyLoopCoord.Y * standardlengthConversion) - (testCoord.Y * testlengthConversion)); + if (diffY < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Y Match + if (diffY == 0) + { + double diffZ = Math.Abs((PolyLoopCoord.Z * standardlengthConversion) - (testCoord.Z * testlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + logger.Info("Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " Y and Z coordinate exactly. X is within tolerance."); + logger.Info(" X: " + PolyLoopCoord.X.ToString() + ", Y: " + PolyLoopCoord.Y.ToString() + ", Z: " + PolyLoopCoord.Z.ToString()); + possibleMatch.Add(PolyLoopCoord); + } + else + { + logger.Info("Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " Y coordinate exactly. X and Z is within tolerance."); + logger.Info(" X: " + PolyLoopCoord.X.ToString() + ", Y: " + PolyLoopCoord.Y.ToString() + ", Z: " + PolyLoopCoord.Z.ToString()); + possibleMatch.Add(PolyLoopCoord); + } + } + else + { + //z is not matched so continue + continue; + } + } + // the Y match is not perfect but within tolerance + else + { + double diffZ = Math.Abs((PolyLoopCoord.Z * standardlengthConversion) - (testCoord.Z * testlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + logger.Info("Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " Z coordinate exactly. 
The X and Y coordinates are within tolerance."); + logger.Info(" X: " + PolyLoopCoord.X.ToString() + ", Y: " + PolyLoopCoord.Y.ToString() + ", Z: " + PolyLoopCoord.Z.ToString()); + possibleMatch.Add(PolyLoopCoord); + } + else + { + logger.Info("Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + ". The X, Y, and Z coordinates are within tolerance."); + logger.Info(" X: " + PolyLoopCoord.X.ToString() + ", Y: " + PolyLoopCoord.Y.ToString() + ", Z: " + PolyLoopCoord.Z.ToString()); + possibleMatch.Add(PolyLoopCoord); + } + } + // no match found for the Z + else + { + continue; + } + } + } + //no match could be found for the Y + else + { + continue; + } + } + } + else + { + //not a match found for the X and continue + continue; + } + } + if (exactMatch.Count > 1) + { + logger.Info("PROGRAMMER'S NOTE: Error, overlapping polyLoop coordinates found in the Test Surface PolyLoop."); + return false; + } + else if (exactMatch.Count == 1) + { + logger.Info("PROGRAMMER'S NOTE: One coordinate candidate found. Exact match"); + return true; + } + if (possibleMatch.Count > 1) + { + logger.Info("PROGRAMMER'S NOTE: No exact solution for a match of the polyLoop coordinate. More than one coordinate candidate found."); + return false; + } + else if (possibleMatch.Count == 1) + { + logger.Info("PROGRAMMER'S NOTE: One coordinate candidate found."); + return true; + } + else + { + logger.Info("PROGRAMMER'S NOTE: No coordinate candidate found."); + return false; + } + + } + + private static bool GetPolyLoopCoordMatch(Vector.MemorySafe_CartCoord standardPolyLoopCoord, SurfaceDefinitions testSurface, string standardSurfaceId, double testlengthConversion, double standardlengthConversion) + { + List possibleMatch = new List(); + List exactMatch = new List(); + logger.Debug("START SUBROUTINE: GetPolyLoopCoordMatch"); + logger.Debug(standardSurfaceId +"Coordinates: "+" X: " + standardPolyLoopCoord.X.ToString() + ", Y: " + standardPolyLoopCoord.Y.ToString() + ", Z: " + standardPolyLoopCoord.Z.ToString()); + foreach (Vector.MemorySafe_CartCoord testPolyLoopCoord in testSurface.PlCoords) + { + + //find an appropriate match + double diffX = Math.Abs((testPolyLoopCoord.X * testlengthConversion) - (standardPolyLoopCoord.X * standardlengthConversion)); + if (diffX < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //found a perfect X Match + if (diffX == 0) + { + //test Y + double diffY = Math.Abs((testPolyLoopCoord.Y * testlengthConversion) - (standardPolyLoopCoord.Y * standardlengthConversion)); + if (diffY < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Y Match + if (diffY == 0) + { + double diffZ = Math.Abs((testPolyLoopCoord.Z * testlengthConversion) - (standardPolyLoopCoord.Z * standardlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + logger.Debug("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " exactly"); + logger.Debug("Test Surface " + testSurface.SurfaceId); + logger.Debug(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + exactMatch.Add(testPolyLoopCoord); + } + else + { + //not a perfect Z match but within bounds + logger.Debug("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " X and Y coordinates exactly. 
Z coordinate within allowable tolerance."); + logger.Debug("Test Surface " + testSurface.SurfaceId); + logger.Debug(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + else + { + //z coordinate not within tolerance + continue; + } + } + //Y Match is within the allowable tolerance + else + { + double diffZ = Math.Abs((testPolyLoopCoord.Z * testlengthConversion) - (standardPolyLoopCoord.Z * standardlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + logger.Debug("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " in the X and Z coordinates, exactly. Y coordinate is within tolerance."); + logger.Debug("Test Surface " + testSurface.SurfaceId); + logger.Debug(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + else + { + logger.Debug("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " X exactly. Y and Z coordinates are within tolerance."); + logger.Debug("Test Surface " + testSurface.SurfaceId); + logger.Debug(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + else + { + //z coordinate is not within tolerance + continue; + } + } + } + else + { + //a y match could not be found within tolerance + continue; + } + + } + else + { + //not a perfect X match, but within tolerance + //test Y + double diffY = Math.Abs((testPolyLoopCoord.Y * testlengthConversion) - (standardPolyLoopCoord.Y * standardlengthConversion)); + if (diffY < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Y Match + if (diffY == 0) + { + double diffZ = Math.Abs((testPolyLoopCoord.Z * testlengthConversion) - (standardPolyLoopCoord.Z * standardlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + logger.Info("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " Y and Z coordinate exactly. X is within tolerance."); + logger.Info("Test Surface " + testSurface.SurfaceId); + logger.Info(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + else + { + logger.Info("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " Y coordinate exactly. 
X and Z is within tolerance."); + logger.Info("Test Surface " + testSurface.SurfaceId); + logger.Info(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + else + { + //z is not matched so continue + continue; + } + } + // the Y match is not perfect but within tolerance + else + { + double diffZ = Math.Abs((testPolyLoopCoord.Z * testlengthConversion) - (standardPolyLoopCoord.Z * standardlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + logger.Info("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " Z coordinate exactly. The X and Y coordinates are within tolerance."); + logger.Info("Test Surface " + testSurface.SurfaceId); + logger.Info(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + else + { + logger.Info("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + ". The X, Y, and Z coordinates are within tolerance."); + logger.Info("Test Surface " + testSurface.SurfaceId); + logger.Info(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + // no match found for the Z + else + { + continue; + } + } + } + //no match could be found for the Y + else + { + continue; + } + } + } + else + { + //not a match found for the X and continue + continue; + } + } + if (exactMatch.Count > 1) + { + logger.Info("Error, overlapping polyLoop coordinates found in the Test Surface PolyLoop."); + return false; + } + else if (exactMatch.Count == 1) + { + logger.Info("One coordinate candidate found. Exact match"); + return true; + } + if (possibleMatch.Count > 1) + { + logger.Info("No exact solution for a match of the polyLoop coordinate. 
More than one coordinate candidate found."); + return false; + } + else if (possibleMatch.Count == 1) + { + logger.Info("One coordinate candidate found."); + return true; + } + else + { + logger.Info("No coordinate candidate found."); + return false; + } + + } + + private static DOEgbXMLReportingObj GetPolyLoopCoordMatch(Vector.MemorySafe_CartCoord standardPolyLoopCoord, SurfaceDefinitions testSurface, DOEgbXMLReportingObj report, string standardSurfaceId, double testlengthConversion, double standardlengthConversion) + { + List possibleMatch = new List(); + List exactMatch = new List(); + report.MessageList.Add("Testing Polyloop coordinates for Standard surface " + standardSurfaceId); + report.MessageList.Add(" X: " + standardPolyLoopCoord.X.ToString() + ", Y: " + standardPolyLoopCoord.Y.ToString() + ", Z: " + standardPolyLoopCoord.Z.ToString()); + foreach (Vector.MemorySafe_CartCoord testPolyLoopCoord in testSurface.PlCoords) + { + + //find an appropriate match + double diffX = Math.Abs((testPolyLoopCoord.X * testlengthConversion) - (standardPolyLoopCoord.X * standardlengthConversion)); + if (diffX < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //found a perfect X Match + if (diffX == 0) + { + //test Y + double diffY = Math.Abs((testPolyLoopCoord.Y * testlengthConversion) - (standardPolyLoopCoord.Y*standardlengthConversion)); + if (diffY < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Y Match + if (diffY == 0) + { + double diffZ = Math.Abs((testPolyLoopCoord.Z * testlengthConversion) - (standardPolyLoopCoord.Z*standardlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + report.MessageList.Add("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " exactly"); + report.MessageList.Add("Test Surface " + testSurface.SurfaceId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + exactMatch.Add(testPolyLoopCoord); + } + else + { + //not a perfect Z match but within bounds + report.MessageList.Add("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " X and Y coordinates exactly. Z coordinate within allowable tolerance."); + report.MessageList.Add("Test Surface " + testSurface.SurfaceId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + else + { + //z coordinate not within tolerance + continue; + } + } + //Y Match is within the allowable tolerance + else + { + double diffZ = Math.Abs((testPolyLoopCoord.Z * testlengthConversion) - (standardPolyLoopCoord.Z*standardlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + report.MessageList.Add("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " in the X and Z coordinates, exactly. 
Y coordinate is within tolerance."); + report.MessageList.Add("Test Surface " + testSurface.SurfaceId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + else + { + report.MessageList.Add("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " X exactly. Y and Z coordinates are within tolerance."); + report.MessageList.Add("Test Surface " + testSurface.SurfaceId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + else + { + //z coordinate is not within tolerance + continue; + } + } + } + else + { + //a y match could not be found within tolerance + continue; + } + + } + else + { + //not a perfect X match, but within tolerance + //test Y + double diffY = Math.Abs((testPolyLoopCoord.Y * testlengthConversion) - (standardPolyLoopCoord.Y*standardlengthConversion)); + if (diffY < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Y Match + if (diffY == 0) + { + double diffZ = Math.Abs((testPolyLoopCoord.Z * testlengthConversion) - (standardPolyLoopCoord.Z*standardlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + report.MessageList.Add("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " Y and Z coordinate exactly. X is within tolerance."); + report.MessageList.Add("Test Surface " + testSurface.SurfaceId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + else + { + report.MessageList.Add("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " Y coordinate exactly. X and Z is within tolerance."); + report.MessageList.Add("Test Surface " + testSurface.SurfaceId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + else + { + //z is not matched so continue + continue; + } + } + // the Y match is not perfect but within tolerance + else + { + double diffZ = Math.Abs((testPolyLoopCoord.Z * testlengthConversion) - (standardPolyLoopCoord.Z*standardlengthConversion)); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + report.MessageList.Add("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + " Z coordinate exactly. The X and Y coordinates are within tolerance."); + report.MessageList.Add("Test Surface " + testSurface.SurfaceId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + else + { + report.MessageList.Add("Test Surface " + testSurface.SurfaceId + ": Found polyLoop coordinate that matches Standard Surface " + standardSurfaceId + ". 
The X, Y, and Z coordinates are within tolerance."); + report.MessageList.Add("Test Surface " + testSurface.SurfaceId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + // no match found for the Z + else + { + continue; + } + } + } + //no match could be found for the Y + else + { + continue; + } + } + } + else + { + //not a match found for the X and continue + continue; + } + } + if (exactMatch.Count > 1) + { + report.MessageList.Add("Error, overlapping polyLoop coordinates found in the Test Surface PolyLoop."); + report.passOrFail = false; + return report; + } + else if (exactMatch.Count == 1) + { + report.MessageList.Add("One coordinate candidate found. Exact match"); + report.passOrFail = true; + return report; + } + if (possibleMatch.Count > 1) + { + report.MessageList.Add("No exact solution for a match of the polyLoop coordinate. More than one coordinate candidate found."); + report.passOrFail = false; + return report; + } + else if (possibleMatch.Count == 1) + { + report.MessageList.Add("One coordinate candidate found."); + report.passOrFail = true; + return report; + } + else + { + report.MessageList.Add("No coordinate candidate found."); + report.passOrFail = false; + return report; + } + + } + + private static DOEgbXMLReportingObj GetOpeningPolyLoopCoordMatch(Vector.MemorySafe_CartCoord standardPolyLoopCoord, OpeningDefinitions testOpening, DOEgbXMLReportingObj report, string standardOpeningId) + { + List possibleMatch = new List(); + List exactMatch = new List(); + report.MessageList.Add("Testing Polyloop coordinates for Standard opening " + standardOpeningId); + report.MessageList.Add(" X: " + standardPolyLoopCoord.X.ToString() + ", Y: " + standardPolyLoopCoord.Y.ToString() + ", Z: " + standardPolyLoopCoord.Z.ToString()); + foreach (Vector.MemorySafe_CartCoord testPolyLoopCoord in testOpening.PlCoords) + { + + //find an appropriate match + double diffX = Math.Abs(testPolyLoopCoord.X - standardPolyLoopCoord.X); + if (diffX < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //found a perfect X Match + if (diffX == 0) + { + //test Y + double diffY = Math.Abs(testPolyLoopCoord.Y - standardPolyLoopCoord.Y); + if (diffY < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Y Match + if (diffY == 0) + { + double diffZ = Math.Abs(testPolyLoopCoord.Z - standardPolyLoopCoord.Z); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + report.MessageList.Add("Test Opening " + testOpening.OpeningId + ": Found polyLoop coordinate that matches Standard Opening " + standardOpeningId + " exactly"); + report.MessageList.Add("Test Opening " + testOpening.OpeningId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + exactMatch.Add(testPolyLoopCoord); + } + else + { + //not a perfect Z match but within bounds + report.MessageList.Add("Test Opening " + testOpening.OpeningId + ": Found polyLoop coordinate that matches Standard Opening " + standardOpeningId + " X and Y coordinates exactly. 
Z coordinate within allowable tolerance."); + report.MessageList.Add("Test Opening " + testOpening.OpeningId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + else + { + //z coordinate not within tolerance + continue; + } + } + //Y Match is within the allowable tolerance + else + { + double diffZ = Math.Abs(testPolyLoopCoord.Z - standardPolyLoopCoord.Z); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + report.MessageList.Add("Test Opening " + testOpening.OpeningId + ": Found polyLoop coordinate that matches Standard Opening " + standardOpeningId + " in the X and Z coordinates, exactly. Y coordinate is within tolerance."); + report.MessageList.Add("Test Opening " + testOpening.OpeningId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + else + { + report.MessageList.Add("Test Opening " + testOpening.OpeningId + ": Found polyLoop coordinate that matches Standard Opening " + standardOpeningId + " X exactly. Y and Z coordinates are within tolerance."); + report.MessageList.Add("Test Opening " + testOpening.OpeningId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + else + { + //z coordinate is not within tolerance + continue; + } + } + } + else + { + //a y match could not be found within tolerance + continue; + } + + } + else + { + //not a perfect X match, but within tolerance + //test Y + double diffY = Math.Abs(testPolyLoopCoord.Y - standardPolyLoopCoord.Y); + if (diffY < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Y Match + if (diffY == 0) + { + double diffZ = Math.Abs(testPolyLoopCoord.Z - standardPolyLoopCoord.Z); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + report.MessageList.Add("Test Opening " + testOpening.OpeningId + ": Found polyLoop coordinate that matches Standard Opening " + standardOpeningId + " Y and Z coordinate exactly. X is within tolerance."); + report.MessageList.Add("Test Opening " + testOpening.OpeningId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + else + { + report.MessageList.Add("Test Opening " + testOpening.OpeningId + ": Found polyLoop coordinate that matches Standard Opening " + standardOpeningId + " Y coordinate exactly. 
X and Z is within tolerance."); + report.MessageList.Add("Test Opening " + testOpening.OpeningId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + else + { + //z is not matched so continue + continue; + } + } + // the Y match is not perfect but within tolerance + else + { + double diffZ = Math.Abs(testPolyLoopCoord.Z - standardPolyLoopCoord.Z); + if (diffZ < DOEgbXMLBasics.Tolerances.SurfacePLCoordTolerance) + { + //perfect Z match + if (diffZ == 0) + { + report.MessageList.Add("Test opening " + testOpening.OpeningId + ": Found polyLoop coordinate that matches Standard Opening " + standardOpeningId + " Z coordinate exactly. The X and Y coordinates are within tolerance."); + report.MessageList.Add("Test Opening " + testOpening.OpeningId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + else + { + report.MessageList.Add("Test opening " + testOpening.OpeningId + ": Found polyLoop coordinate that matches Standard Opening " + standardOpeningId + ". The X, Y, and Z coordinates are within tolerance."); + report.MessageList.Add("Test Opening " + testOpening.OpeningId); + report.MessageList.Add(" X: " + testPolyLoopCoord.X.ToString() + ", Y: " + testPolyLoopCoord.Y.ToString() + ", Z: " + testPolyLoopCoord.Z.ToString()); + possibleMatch.Add(testPolyLoopCoord); + } + } + // no match found for the Z + else + { + continue; + } + } + } + //no match could be found for the Y + else + { + continue; + } + } + } + else + { + //not a match found for the X and continue + continue; + } + } + if (exactMatch.Count > 1) + { + report.MessageList.Add("Error, overlapping polyLoop coordinates found in the Test Opening PolyLoop."); + report.passOrFail = false; + return report; + } + else if (exactMatch.Count == 1) + { + report.MessageList.Add("One coordinate candidate found. Exact match"); + report.passOrFail = true; + return report; + } + if (possibleMatch.Count > 1) + { + report.MessageList.Add("No exact solution for a match of the polyLoop coordinate. More than one coordinate candidate found."); + report.passOrFail = false; + return report; + } + else if (possibleMatch.Count == 1) + { + report.MessageList.Add("One coordinate candidate found."); + report.passOrFail = true; + return report; + } + else + { + report.MessageList.Add("No coordinate candidate found."); + report.passOrFail = false; + return report; + } + + } + + + + //March15 2013 + //by CHarriman Senior Product Manager Carmel Software Corporation + //this is a function only used internally. It is used to verify if a surface object only has four coordinates, and if those coordinates form + //a square or rectangle. 
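+        //Illustrative note (added commentary, not part of the original routine): the check below relies on
+        //the cross product of opposite sides. For a unit square with polyloop coordinates (0,0,0), (1,0,0),
+        //(1,1,0), (0,1,0), the side vectors v1 = (1,0,0) and v2 = (-1,0,0) are antiparallel, so v1 x v2 is the
+        //zero vector; normalizing the zero vector produces NaN components, and that NaN magnitude is what marks
+        //the pair as parallel. The same holds for v3 = (0,1,0) and v4 = (0,-1,0), so the surface is treated as
+        //a regular (rectangular) polygon. A trapezoid would yield a non-zero cross product for at least one
+        //pair of opposite sides and fail the check.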
+ private static bool IsSurfaceRegular(SurfaceDefinitions Surface) + { + //tests to see if all candidate surfaces and the standard surface are regular (rectangular polygons) + + bool isRegularPolygon = true; + //see if the standard surface has four coordinates defining its polyloop (one marker of a rectangle) + int standSurfaceCoordinateCount = Surface.PlCoords.Count; + if (standSurfaceCoordinateCount == 4) + { + //check the two potentially parallel sides, to ensure they are indeed parallel + Vector.CartVect v1 = Vector.CreateVector(Surface.PlCoords[0], Surface.PlCoords[1]); + Vector.CartVect v2 = Vector.CreateVector(Surface.PlCoords[2], Surface.PlCoords[3]); + Vector.CartVect v1xv2 = Vector.CrossProduct(v1, v2); + v1xv2 = Vector.UnitVector(v1xv2); + double magnitudev1xv2 = Vector.VectorMagnitude(v1xv2); + Vector.CartVect v3 = Vector.CreateVector(Surface.PlCoords[1], Surface.PlCoords[2]); + Vector.CartVect v4 = Vector.CreateVector(Surface.PlCoords[3], Surface.PlCoords[0]); + Vector.CartVect v3xv4 = Vector.CrossProduct(v3, v4); + v3xv4 = Vector.UnitVector(v3xv4); + double magnitudev3xv4 = Vector.VectorMagnitude(v3xv4); + //the unit vector will not be a number NaN if the Cross product detects a zero vector (indicating parallel vectors) + if (double.IsNaN(magnitudev1xv2) && double.IsNaN(magnitudev3xv4)) + { + isRegularPolygon = true; + } + else + { + isRegularPolygon = false; + } + } + else + { + //might as well stop here because + isRegularPolygon = false; + return isRegularPolygon; + } + return isRegularPolygon; + + } + + //March15 2013 + //by CHarriman Senior Product Manager Carmel Software Corporation + //this is a function only used internally. It is used to verify if an opening object only has four coordinates, and if those coordinates form + //a square or rectangle. 
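+        //Illustrative note (added commentary): IsOpeningRegular mirrors IsSurfaceRegular above; the same
+        //antiparallel-side, NaN cross-product test is applied to the opening's four polyloop coordinates,
+        //so the unit-square example given for IsSurfaceRegular applies here unchanged.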
+ private static bool IsOpeningRegular(OpeningDefinitions Opening) + { + //tests to see if all candidate surfaces and the standard surface are regular (rectangular polygons) + + bool isRegularPolygon = true; + //see if the standard surface has four coordinates defining its polyloop (one marker of a rectangle) + int standSurfaceCoordinateCount = Opening.PlCoords.Count; + if (standSurfaceCoordinateCount == 4) + { + //check the two potentially parallel sides, to ensure they are indeed parallel + Vector.CartVect v1 = Vector.CreateVector(Opening.PlCoords[0], Opening.PlCoords[1]); + Vector.CartVect v2 = Vector.CreateVector(Opening.PlCoords[2], Opening.PlCoords[3]); + Vector.CartVect v1xv2 = Vector.CrossProduct(v1, v2); + v1xv2 = Vector.UnitVector(v1xv2); + double magnitudev1xv2 = Vector.VectorMagnitude(v1xv2); + Vector.CartVect v3 = Vector.CreateVector(Opening.PlCoords[1], Opening.PlCoords[2]); + Vector.CartVect v4 = Vector.CreateVector(Opening.PlCoords[3], Opening.PlCoords[0]); + Vector.CartVect v3xv4 = Vector.CrossProduct(v3, v4); + v3xv4 = Vector.UnitVector(v3xv4); + double magnitudev3xv4 = Vector.VectorMagnitude(v3xv4); + //the unit vector will not be a number NaN if the Cross product detects a zero vector (indicating parallel vectors) + if (double.IsNaN(magnitudev1xv2) && double.IsNaN(magnitudev3xv4)) + { + isRegularPolygon = true; + } + else + { + isRegularPolygon = false; + } + } + else + { + //might as well stop here because + isRegularPolygon = false; + return isRegularPolygon; + } + return isRegularPolygon; + + } + + private static double GetSurfaceArea(SurfaceDefinitions surface, double areaConversion) + { + + logger.Debug("STARTING SUBROUTINE: GetSurfaceArea."); + logger.Debug("PROGAMMER'S NOTE: PolyLoop coordinates will be used to calculate the area."); + double area = -1; + //there are two basic cases, one where we get the area using greens theorem when the surface is parallel + //to one of the axes of the project global reference frame + //and the second where the surface is not parallel to one of the axes of the global reference frame + //Surface normal Parallel to global reference frame X Axis + if (Math.Abs(surface.PlRHRVector.X) == 1 && surface.PlRHRVector.Y == 0 && surface.PlRHRVector.Z == 0) + { + List coordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in surface.PlCoords) + { + //only take the Y and Z coordinates and throw out the X because we can assume that they are all the same + //create new + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(0, coord.Y, coord.Z); + coordList.Add(c2); + + } + area = GetAreaFrom2DPolyLoop(coordList); + if (area == -999) + { + logger.Debug("The coordinates of the standard file polyloop has been incorrectly defined."); + logger.Debug("The coordinates should be 2D and could not be translated to 2D"); + logger.Fatal("ATTENTION: Attempting to calculate surface area. 
Test may be inaccurate and requires gbXML.org support"); + } + } + //Surface normal Parallel to global reference frame y Axis + else if (surface.PlRHRVector.X == 0 && Math.Abs(surface.PlRHRVector.Y) == 1 && surface.PlRHRVector.Z == 0) + { + List coordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in surface.PlCoords) + { + //only take the X and Z coordinates and throw out the Y because we can assume that they are all the same + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, 0, coord.Z); + coordList.Add(c2); + + } + area = GetAreaFrom2DPolyLoop(coordList); + if (area == -999) + { + logger.Debug("The coordinates of the standard file polyloop has been incorrectly defined."); + logger.Debug("The coordinates should be 2D and could not be translated to 2D"); + logger.Fatal("ATTENTION: Attempting to calculate surface area. Test may be inaccurate and requires gbXML.org support"); + + } + } + else if (surface.PlRHRVector.X == 0 && surface.PlRHRVector.Y == 0 && Math.Abs(surface.PlRHRVector.Z) == 1) + { + List coordList = new List(); + foreach (Vector.MemorySafe_CartCoord coord in surface.PlCoords) + { + //only take the X and Y coordinates and throw out the Z because we can assume that they are all the same + Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, coord.Y, 0); + coordList.Add(c2); + + } + area = GetAreaFrom2DPolyLoop(coordList); + if (area == -999) + { + logger.Debug("The coordinates of the standard file polyloop has been incorrectly defined."); + logger.Debug("The coordinates should be 2D and could not be translated to 2D"); + logger.Fatal("ATTENTION: Attempting to calculate surface area. Test may be inaccurate and requires gbXML.org support"); + + } + + + + } + //the surface is not aligned with one of the reference frame axes, which requires a bit more work to determine the right answer. 
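+            //Illustrative note (added commentary, not part of the original routine): the branch below rotates the
+            //polyloop into a local 2-D frame and then applies the same Green's theorem (shoelace) sum used by the
+            //axis-aligned branches above, i.e. area = 1/2 * sum(x_i * y_(i+1) - y_i * x_(i+1)). The surface normal
+            //serves as the local Z axis, localY = normal x globalX and localX = localY x normal, and each vertex is
+            //reduced to (distance . localX, distance . localY), where distance runs from the first polyloop coordinate.
+            //Because the local axes are orthonormal, in-plane lengths are preserved: for example, a 1 x 1 panel whose
+            //normal is (1,1,0)/sqrt(2) projects to a unit square, and the shoelace sum returns 1.0 as expected.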
+ else + { + logger.Debug("The standard surface is not aligned along an axis, and will be rotated into a new coordinate frame"); + //New Z Axis for this plane is the normal vector, does not need to be created + //Get New Y Axis which is the surface Normal Vector cross the original global reference X unit vector (all unit vectors please + Vector.CartVect globalReferenceX = new Vector.CartVect(); + globalReferenceX.X = 1; + globalReferenceX.Y = 0; + globalReferenceX.Z = 0; + Vector.MemorySafe_CartVect localY = Vector.UnitVector(Vector.CrossProductMSRetMSNV(surface.PlRHRVector, globalReferenceX)); + + + //new X axis is the localY cross the surface normal vector + Vector.MemorySafe_CartVect localX = Vector.UnitVector(Vector.CrossProduct(localY, surface.PlRHRVector)); + + //convert the polyloop coordinates to a local 2-D reference frame + //using a trick employed by video game programmers found here http://stackoverflow.com/questions/1023948/rotate-normal-vector-onto-axis-plane + List translatedCoordinates = new List(); + Vector.MemorySafe_CartCoord newOrigin = new Vector.MemorySafe_CartCoord(0, 0, 0); + translatedCoordinates.Add(newOrigin); + for (int j = 1; j < surface.PlCoords.Count; j++) + { + //randomly assigns the first polyLoop coordinate as the origin + Vector.MemorySafe_CartCoord origin = surface.PlCoords[0]; + //captures the components of a vector drawn from the new origin to the + Vector.CartVect distance = new Vector.CartVect(); + distance.X = surface.PlCoords[j].X - origin.X; + distance.Y = surface.PlCoords[j].Y - origin.Y; + distance.Z = surface.PlCoords[j].Z - origin.Z; + + //x coordinate is distance vector dot the new local X axis + double tX = distance.X * localX.X + distance.Y * localX.Y + distance.Z * localX.Z; + //y coordinate is distance vector dot the new local Y axis + double tY = distance.X * localY.X + distance.Y * localY.Y + distance.Z * localY.Z; + double tZ = 0; + Vector.MemorySafe_CartCoord translatedPt = new Vector.MemorySafe_CartCoord(tX, tY, tZ); + translatedCoordinates.Add(translatedPt); + + } + area = GetAreaFrom2DPolyLoop(translatedCoordinates); + if (area == -999) + { + logger.Debug("The coordinates of the standard file polyloop has been incorrectly defined."); + logger.Debug("The coordinates should be 2D and could not be translated to 2D"); + logger.Fatal("ATTENTION: Attempting to calculate surface area. Test may be inaccurate and requires gbXML.org support"); + } + } + logger.Debug("ENDING SUBROUTINE: GetSurfaceArea"); + logger.Debug("Area:" + Math.Abs(area).ToString()); + if (area != -1 || area != -999) + { + return Math.Abs(area * areaConversion); + } + else return area; + } + + //February 20 2013 + //Created by Chien Si Harriman Senior Product Manager for the Carmel Software Corporation + //Currently the tool assumes that the polyloop is a valid one (counterclockwise coordinates) Previous checks ensure this is the case? 
+ //and the segments of the polygon are not self-intersecting (there are no previous tests for this as of the date above) + private static double GetAreaFrom2DPolyLoop(List coordList) + { + int count = coordList.Count; + double areaprod = 0; + bool XisZero = true; + bool YisZero = true; + bool ZisZero = true; + //the following calculates the area of any irregular polygon + foreach (Vector.MemorySafe_CartCoord coord in coordList) + { + if (coord.X != 0) XisZero = false; + if (coord.Y != 0) YisZero = false; + if (coord.Z != 0) ZisZero = false; + } + if (!XisZero && !YisZero && !ZisZero) return -999; + + if (XisZero) + { + for (int i = 0; i < count; i++) + { + if (i < count - 1) + { + areaprod += (coordList[i].Y * coordList[i + 1].Z - coordList[i].Z * coordList[i + 1].Y); + } + else if (i == count - 1) + { + areaprod += (coordList[i].Y * coordList[0].Z - coordList[i].Z * coordList[0].Y); + } + } + areaprod = areaprod / 2; + } + else if (YisZero) + { + for (int i = 0; i < count; i++) + { + if (i < count - 1) + { + areaprod += (coordList[i].X * coordList[i + 1].Z - coordList[i].Z * coordList[i + 1].X); + } + else if (i == count - 1) + { + areaprod += (coordList[i].X * coordList[0].Z - coordList[i].Z * coordList[0].X); + } + } + areaprod = areaprod / 2; + } + else if (ZisZero) + { + for (int i = 0; i < count; i++) + { + if (i < count - 1) + { + areaprod += (coordList[i].X * coordList[i + 1].Y - coordList[i].Y * coordList[i + 1].X); + } + else if (i == count - 1) + { + areaprod += (coordList[i].X * coordList[0].Y - coordList[i].Y * coordList[0].X); + } + } + areaprod = areaprod / 2; + } + return areaprod; + } + + //private static double GetAreaFrom2DPolyLoop(List coordList) + //{ + // int count = coordList.Count; + // double areaprod = 0; + // bool XisZero = true; + // bool YisZero = true; + // bool ZisZero = true; + // //the following calculates the area of any irregular polygon + // foreach (Vector.MemorySafe_CartCoord coord in coordList) + // { + // if (coord.X != 0) XisZero = false; + // if (coord.Y != 0) YisZero = false; + // if (coord.Z != 0) ZisZero = false; + // } + // if (!XisZero && !YisZero && !ZisZero) return -999; + + // if (XisZero) + // { + // for (int i = 0; i < count; i++) + // { + // if (i < count - 1) + // { + // areaprod += (coordList[i].Y * coordList[i + 1].Z - coordList[i].Z * coordList[i + 1].Y); + // } + // else if (i == count - 1) + // { + // areaprod += (coordList[i].Y * coordList[0].Z - coordList[i].Z * coordList[0].Y); + // } + // } + // areaprod = areaprod / 2; + // } + // else if (YisZero) + // { + // for (int i = 0; i < count; i++) + // { + // if (i < count - 1) + // { + // areaprod += (coordList[i].X * coordList[i + 1].Z - coordList[i].Z * coordList[i + 1].X); + // } + // else if (i == count - 1) + // { + // areaprod += (coordList[i].X * coordList[0].Z - coordList[i].Z * coordList[0].X); + // } + // } + // areaprod = areaprod / 2; + // } + // else if (ZisZero) + // { + // for (int i = 0; i < count; i++) + // { + // if (i < count - 1) + // { + // areaprod += (coordList[i].X * coordList[i + 1].Y - coordList[i].Y * coordList[i + 1].X); + // } + // else if (i == count - 1) + // { + // areaprod += (coordList[i].X * coordList[0].Y - coordList[i].Y * coordList[0].X); + // } + // } + // areaprod = areaprod / 2; + // } + // return areaprod; + //} + + private static DOEgbXMLReportingObj CountFixedWindows(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + report.testSummary = "This test compares the total number of Opening elements with the 
openingType=\"FixedWindow\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this Surface Type in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. In other words, the Opening of type FixedWindow counts are the same, or the test fails."; + //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML + //added Feb 13 2013 + + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface/gbXMLv5:Opening", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "openingType") + { + string type = at.Value; + if (type == "FixedWindow") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference <= report.tolerance) + { + report.longMsg = "The " + report.testType + " matches standard file, the difference was within tolerance = " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The " + report.testType + " does not match standard file, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". " + resultsArray[i] + " fixed windows in the standard file and " + resultsArray[i - 1] + " fixed windows in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + private static DOEgbXMLReportingObj CountOperableWindows(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + report.testSummary = "This test compares the total number of Opening elements with the openingType=\"OperableWindow\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this Surface Type in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. 
In other words, the Opening of type FixedWindow counts are the same, or the test fails."; + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface/gbXMLv5:Opening", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "openingType") + { + string type = at.Value; + if (type == "OperableWindow") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference <= report.tolerance) + { + report.longMsg = "The " + report.testType + " matches standard file, the difference was within tolerance = " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The " + report.testType + " does not match standard file, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". " + resultsArray[i] + " operable windows in the standard file and " + resultsArray[i - 1] + " operable windows in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + private static DOEgbXMLReportingObj CountFixedSkylights(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + report.testSummary = "This test compares the total number of Opening elements with the openingType=\"FixedSkylight\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this Surface Type in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. 
In other words, the Opening of type FixedSkylight counts are the same, or the test fails."; + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface/gbXMLv5:Opening", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "openingType") + { + string type = at.Value; + if (type == "FixedSkylight") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference <= report.tolerance) + { + report.longMsg = "The " + report.testType + " matches standard file, the difference was within tolerance = " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The " + report.testType + " does not match standard file, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". " + resultsArray[i] + " fixed skylights in the standard file and " + resultsArray[i - 1] + " fixed skylights in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + private static DOEgbXMLReportingObj CountOperableSkylights(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + report.testSummary = "This test compares the total number of Opening elements with the openingType=\"OperableSkylights\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this Surface Type in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. 
In other words, the Opening of type OperableSkylights counts are the same, or the test fails."; + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface/gbXMLv5:Opening", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "openingType") + { + string type = at.Value; + if (type == "OperableSkylight") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference <= report.tolerance) + { + report.longMsg = "The " + report.testType + " matches standard file, the difference was within tolerance = " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The " + report.testType + " does not match standard file, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". " + resultsArray[i] + " operable skylights in the standard file and " + resultsArray[i - 1] + " operable skylights in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + private static DOEgbXMLReportingObj CountSlidingDoors(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + report.testSummary = "This test compares the total number of Opening elements with the openingType=\"SlidingDoor\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this Surface Type in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. 
In other words, the Opening of type SlidingDoor counts are the same, or the test fails."; + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface/gbXMLv5:Opening", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "openingType") + { + string type = at.Value; + if (type == "SlidingDoor") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference <= report.tolerance) + { + report.longMsg = "The " + report.testType + " matches standard file, the difference was within tolerance = " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The " + report.testType + " does not match standard file, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". " + resultsArray[i] + " sliding doors in the standard file and " + resultsArray[i - 1] + " sliding doors in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + private static DOEgbXMLReportingObj CountNonSlidingDoors(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + report.testSummary = "This test compares the total number of Opening elements with the openingType=\"NonSlidingDoor\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this Surface Type in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. 
In other words, the Opening of type NonSlidingDoor counts are the same, or the test fails."; + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface/gbXMLv5:Opening", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "openingType") + { + string type = at.Value; + if (type == "NonSlidingDoor") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference <= report.tolerance) + { + report.longMsg = "The " + report.testType + " matches standard file, the difference was within tolerance = " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The " + report.testType + " does not match standard file, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". " + resultsArray[i] + " non-sliding doors in the standard file and " + resultsArray[i - 1] + " non-sliding doors in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + + private static DOEgbXMLReportingObj CountAirOpenings(List gbXMLDocs, List gbXMLnsm, DOEgbXMLReportingObj report, string Units) + { + report.testSummary = "This test compares the total number of Opening elements with the openingType=\"Air\" in the test"; + report.testSummary += " and standard files. It does this by"; + report.testSummary += " simply counting up the total number of times that a \"\" tag appears with this Surface Type in both files."; + report.testSummary += " If the quantities are the same, this test passes, if different, it will fail. "; + report.testSummary += "The tolerance is zero for this test. 
In other words, the Opening of type Air counts are the same, or the test fails."; + report.unit = Units; + //assuming that this will be plenty large for now + string[] resultsArray = new string[50]; + int nodecount = 0; + for (int i = 0; i < gbXMLDocs.Count; i++) + { + nodecount = 0; + try + { + XmlDocument gbXMLTestFile = gbXMLDocs[i]; + XmlNamespaceManager gbXMLns = gbXMLnsm[i]; + + XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Surface/gbXMLv5:Opening", gbXMLns); + foreach (XmlNode surfaceNode in nodes) + { + XmlAttributeCollection spaceAtts = surfaceNode.Attributes; + foreach (XmlAttribute at in spaceAtts) + { + if (at.Name == "openingType") + { + string type = at.Value; + if (type == "Air") + { + nodecount++; + } + break; + } + } + } + + //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result + resultsArray[i] = nodecount.ToString(); + if (i % 2 != 0) + { + //setup standard result and test result + report.standResult.Add(resultsArray[i]); + report.testResult.Add(resultsArray[i - 1]); + report.idList.Add(""); + + double difference = Math.Abs(Convert.ToInt32(resultsArray[i]) - Convert.ToInt32(resultsArray[(i - 1)])); + if (difference <= report.tolerance) + { + report.longMsg = "The " + report.testType + " matches standard file, the difference was within tolerance = " + report.tolerance.ToString() + " " + Units; + report.passOrFail = true; + return report; + } + else + { + report.longMsg = "The " + report.testType + " does not match standard file, the difference was not within tolerance = " + report.tolerance.ToString() + " " + Units + ". Difference of: " + difference + + ". " + resultsArray[i] + " air openings in the standard file and " + resultsArray[i - 1] + " air openings in the test file."; + report.passOrFail = false; + return report; + } + } + else { continue; } + + } + catch (Exception e) + { + report.MessageList.Add(e.ToString()); + report.longMsg = " Failed to locate " + report.testType + " in the XML file."; + report.passOrFail = false; + return report; + } + } + report.longMsg = "Fatal " + report.testType + " Test Failure"; + report.passOrFail = false; + return report; + } + #endregion + } +} diff --git a/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLReportingObj.cs b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLReportingObj.cs new file mode 100644 index 0000000..e95125b --- /dev/null +++ b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLReportingObj.cs @@ -0,0 +1,75 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; + +namespace DOEgbXML +{ + public class DOEgbXMLReportingObj + { + //new ReportingObj strings created Feb 13 2013 + public string testSummary; + public string testReasoning; + // public Dictionary globalTestCriteria; + //original ReportingObj created Jan 1 2013 + public List standResult; + public List testResult; + public List idList; + public double tolerance; + public TestType testType; + public int subTestIndex = -1; + public string unit; + public Dictionary TestPassedDict; + public bool passOrFail; + public List MessageList; + public string longMsg; + // public Dictionary> MatchedSurfaceIds; + // public Dictionary> MatchedOpening; + + public void Clear() + { + + if (standResult != null) + standResult.Clear(); + + if (testResult != null) + testResult.Clear(); + + if (idList != null) + idList.Clear(); + tolerance = DOEgbXMLBasics.Tolerances.ToleranceDefault; + testType = TestType.None; + subTestIndex = -1; + passOrFail = false; + if (MessageList != 
null) { MessageList.Clear(); } + if (TestPassedDict != null) { TestPassedDict.Clear(); } + longMsg = ""; + } + + public DOEgbXMLReportingObj Copy() + { + DOEgbXMLReportingObj report = new DOEgbXMLReportingObj(); + + report.standResult = new List(this.standResult); + report.testResult = new List(this.testResult); + report.idList = new List(this.idList); + report.TestPassedDict = new Dictionary(this.TestPassedDict); + report.MessageList = new List(this.MessageList); + // if (this.MatchedSurfaceIds != null) + // report.MatchedSurfaceIds = new Dictionary>(this.MatchedSurfaceIds); + + + report.tolerance = this.tolerance; + report.testType = this.testType; + report.subTestIndex = this.subTestIndex; + report.unit = this.unit; + report.passOrFail = this.passOrFail; + report.longMsg = this.longMsg; + + report.testSummary = this.testSummary; + + return report; + } + + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLSurface.cs b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLSurface.cs new file mode 100644 index 0000000..1476427 --- /dev/null +++ b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLSurface.cs @@ -0,0 +1,28 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; +using VectorMath; + +namespace DOEgbXML +{ + class SurfaceDefinitions + { + //creates instances of an object that store information about surfaces in a gbXML file + public string SurfaceType; + public string SurfaceId; + public List AdjSpaceId; + public double Azimuth; + public double Tilt; + public double Height; + public double Width; + public Vector.MemorySafe_CartCoord InsertionPoint; + public List PlCoords; + public Vector.MemorySafe_CartVect PlRHRVector; + } + class SurfaceResults + { + public int matchCount; + public Dictionary> SurfaceIdMatch; + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLTestCriteriaObject.cs b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLTestCriteriaObject.cs new file mode 100644 index 0000000..9b581cf --- /dev/null +++ b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLTestCriteriaObject.cs @@ -0,0 +1,450 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; + +namespace DOEgbXML +{ + public class DOEgbXMLTestCriteriaObject + { + public Dictionary TestCriteriaDictionary; + + + public void InitializeTestCriteriaWithTestName(string testname) + { + TestCriteriaDictionary = new Dictionary(); + + + if (testname == "Test1") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add( TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add( TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add( TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test , true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume , true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count , false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, false); + 
TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count,false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count,false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Test2") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, true); //shades must match + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Test3") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Test4") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, true); //shades must match + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Test5") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, true); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, true); //shading surface must match + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Test6") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Test7") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Test8") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, true); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Test12") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Test25") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count,false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count,false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Test28") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, true); //shading count must match + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Whole Building Test 1") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + else if (testname == "Whole Building Test 2") + { + DOEgbXMLBasics.SliversAllowed = true; + TestCriteriaDictionary.Add(TestType.Building_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Count, true); + TestCriteriaDictionary.Add(TestType.Building_Story_Z_Height, true); + TestCriteriaDictionary.Add(TestType.Building_Story_PolyLoop_RHR, true); + TestCriteriaDictionary.Add(TestType.SpaceId_Match_Test, true); + TestCriteriaDictionary.Add(TestType.Space_Area, true); + TestCriteriaDictionary.Add(TestType.Space_Volume, true); + TestCriteriaDictionary.Add(TestType.Total_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Exterior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Underground_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Wall_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Interior_Floor_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Roof_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Shading_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Surface_Count, false); + TestCriteriaDictionary.Add(TestType.Surface_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Surface_Checks, true); + TestCriteriaDictionary.Add(TestType.Fixed_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Windows_Count, false); + TestCriteriaDictionary.Add(TestType.Fixed_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Operable_Skylight_Count, false); + TestCriteriaDictionary.Add(TestType.Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Non_Sliding_Doors_Count, false); + TestCriteriaDictionary.Add(TestType.Air_Openings_Count, false); + TestCriteriaDictionary.Add(TestType.Opening_Planar_Test, true); + TestCriteriaDictionary.Add(TestType.Detailed_Opening_Checks, true); + //As of Feb 13 2013, this test is for a future release. 
Placeholder only + TestCriteriaDictionary.Add(TestType.Shell_Geom_RHR, false); + } + + } + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLTestDetail.cs b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLTestDetail.cs new file mode 100644 index 0000000..1deb26b --- /dev/null +++ b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLTestDetail.cs @@ -0,0 +1,249 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; + +namespace DOEgbXML +{ + public class DOEgbXMLTestDetail + { + public string testName; + public string testSummary; + public string passString; + public string failString; + public string shortTitle; + public double thinWalledAltBuildingArea; + public double thinWalledExpectedBuildingArea; + public List ThinWalledSpecs { get; set; } + + public DOEgbXMLTestDetail() + { + ThinWalledSpecs = new List(); + } + //holds a bunch of strings for a given test + //this list will have this format: TestShortTitle, Pass String, Fail String, Summary String + // public List testString = new List(); + + //this List will store all the test Detail + public List TestDetailList; + + + public void InitializeTestResultStrings() + { + //holds the Detail object for all the tests + TestDetailList = new List(); + + //get the strings for the summary page table + //initialize the testdetails for all the tests + //DOEgbXMLTestDetail test1detail = new DOEgbXMLTestDetail(); + //DOEgbXMLTestDetail test2detail = new DOEgbXMLTestDetail(); + DOEgbXMLTestDetail test3detail = new DOEgbXMLTestDetail(); + //DOEgbXMLTestDetail test4detail = new DOEgbXMLTestDetail(); + //DOEgbXMLTestDetail test5detail = new DOEgbXMLTestDetail(); + DOEgbXMLTestDetail test6detail = new DOEgbXMLTestDetail(); + + DOEgbXMLTestDetail test7detail = new DOEgbXMLTestDetail(); + DOEgbXMLTestDetail test8detail = new DOEgbXMLTestDetail(); + DOEgbXMLTestDetail test12detail = new DOEgbXMLTestDetail(); + + //DOEgbXMLTestDetail test25detail = new DOEgbXMLTestDetail(); + //DOEgbXMLTestDetail test28detail = new DOEgbXMLTestDetail(); + + DOEgbXMLTestDetail testwholeBuild1detail = new DOEgbXMLTestDetail(); + //DOEgbXMLTestDetail testwholeBuild2detail = new DOEgbXMLTestDetail(); + + //create the strings + //reach test. TBD + //Test1 + //test1detail.testName = "Test1"; + //test1detail.shortTitle = "2 Walls of Different Thicknesses with Parallel Aligned Faces"; + //test1detail.testSummary = "This test is designed to make sure that when walls of different thicknesses are joined with their faces aligned, that the centerline offset does not create extra walls during the gbXML creation process. If these extra sliver walls are found in the gbXML file, this test will fail."; + //test1detail.passString = "This test has passed."; + //test1detail.failString = "This test has failed."; + //ADD list to local testStrings List of Lists + //TestDetailList.Add(test1detail); + + //reach test. TBD + //test 2 + //test2detail.testName = "Test2"; + //test2detail.shortTitle = "Single window with overhang that bisects the window's height."; + //test2detail.testSummary = "A 1-zone, one story, simple model with exterior shading devices that act as overhangs and exterior light shelves for windows on the south façade. Light shelves are 1” thick and split a single window instance in the BIM along its centerline. 
This test is designed to ensure that this window should be represented as two windows in gbXML, the one window that is above the overhang, and the other that is below."; + //test2detail.passString = "This test has passed."; + //test2detail.failString = "This test has failed."; + //ADD list to local testStrings List of Lists + //TestDetailList.Add(test2detail); + + //test 3 + test3detail.testName = "Test3"; + test3detail.shortTitle = "Interior walls and Floor Second Level Space Boundary Test "; + test3detail.testSummary = "A 5-zone model with overlapping zones and a double-height zone. This test is designed to ensure that the tool used to create the zones can properly follow the basic conventions for second level space boundaries."; + test3detail.passString = "This test has passed."; + test3detail.failString = "This test has failed."; + test3detail.thinWalledAltBuildingArea = 6321.25; + test3detail.thinWalledExpectedBuildingArea = 6500.00; + ThinWalledAlternatives sp2 = new ThinWalledAlternatives("sp-2-Space",600,7800); //spacename in standard file, thin walled area (ft2), thin walled volume (ft3) + test3detail.ThinWalledSpecs.Add(sp2); + ThinWalledAlternatives sp3 = new ThinWalledAlternatives("sp-3-Space", 1400, 18200); + test3detail.ThinWalledSpecs.Add(sp3); + ThinWalledAlternatives sp4 = new ThinWalledAlternatives("sp-4-Space",2500,65000); + test3detail.ThinWalledSpecs.Add(sp4); + ThinWalledAlternatives sp5 = new ThinWalledAlternatives("sp-5-Space", 1400, 18200); + test3detail.ThinWalledSpecs.Add(sp5); + ThinWalledAlternatives sp6 = new ThinWalledAlternatives("sp-6-Space",600,7800); + test3detail.ThinWalledSpecs.Add(sp6); + //ADD list to local testStrings List of Lists + TestDetailList.Add(test3detail); + + //reach test. TBD + //test 4 + //test4detail.testName = "Test4"; + //test4detail.shortTitle = "Double height space with hole cut in floor and a skylight"; + //test4detail.testSummary = "This test is a large open atrium with a hole cut in the floor to allow light to penetrate through to the floor below."; + //test4detail.passString = "This test has passed."; + //test4detail.failString = "This test has failed."; + //ADD list to local testStrings List of Lists + //TestDetailList.Add(test4detail); + + //reach test. TBD + //test 5 + //test5detail.testName = "Test5"; + //test5detail.shortTitle = "Basement walls that extend above grade and bound two different spaces"; + //test5detail.testSummary = "A two zone model that ensures exterior walls can properly be defined as underground and above grade. A single wall has been drawn by the user that begins below grade, and terminates above grade. Above grade, the walls bound a space that is above grade. Below grade, the walls bound a space that is entirely below grade."; + //test5detail.passString = "This test has passed."; + //test5detail.failString = "This test has failed."; + //ADD list to local testStrings List of Lists + //TestDetailList.Add(test5detail); + + + //Test 6 + test6detail.testName = "Test6"; + test6detail.shortTitle = "Simple box adjacency test."; + test6detail.testSummary = "All above-grade zones, being tested to see if adjacency relationships are preserved. 
Zones with surfaces touching one another should have these surfaces defined as \"Interior\" types and the correct adjacency conditions."; + test6detail.passString = "This test has passed."; + test6detail.failString = "This test has failed."; + test6detail.thinWalledExpectedBuildingArea = 1160; + test6detail.thinWalledAltBuildingArea = 1160; + //ADD list to local testStrings List of Lists + ThinWalledAlternatives sp_0_0_6 = new ThinWalledAlternatives("Space_0_0", 6027.79, 138433.493); //remember has to be in feet2 and feet3 + test6detail.ThinWalledSpecs.Add(sp_0_0_6); + ThinWalledAlternatives sp_1_0_6 = new ThinWalledAlternatives("Space_1_0", 2152.78, 49440.533); + test6detail.ThinWalledSpecs.Add(sp_1_0_6); + ThinWalledAlternatives sp_2_0_6 = new ThinWalledAlternatives("Space_2_0", 2152.78, 49440.533); + test6detail.ThinWalledSpecs.Add(sp_2_0_6); + ThinWalledAlternatives sp_3_0_6 = new ThinWalledAlternatives("Space_3_0", 2152.78, 49440.533); + test6detail.ThinWalledSpecs.Add(sp_3_0_6); + TestDetailList.Add(test6detail); + + //test 7 + test7detail.testName = "Test7"; + test7detail.shortTitle = "Folded roof element."; + test7detail.testSummary = "This is the first in a proposed series of tests that focus on roof elements that grow in geometric complexity."; + test7detail.passString = "This test has passed."; + test7detail.failString = "This test has failed."; + test7detail.thinWalledExpectedBuildingArea = 3200; + test7detail.thinWalledAltBuildingArea = 3042; + ThinWalledAlternatives sp1_7 = new ThinWalledAlternatives("sp-1-Space", 1600, 16000); //remember has to be in feet2 and feet3 + test7detail.ThinWalledSpecs.Add(sp1_7); + ThinWalledAlternatives sp2_7 = new ThinWalledAlternatives("sp-2-Space", 1600, 22925.25); //remember has to be in feet2 and feet3 + test7detail.ThinWalledSpecs.Add(sp2_7); + //ADD list to local testStrings List of Lists + TestDetailList.Add(test7detail); + + //test 8 + test8detail.testName = "Test8"; + test8detail.shortTitle = "Sloping slab on grade"; + test8detail.testSummary = "Ensures that sloping slab on grade comes through properly in gbXML, and that walls, which terminate at grade, are turned into the appropriate surfaceType (\"UndergroundWall\")"; + test8detail.passString = "This test has passed."; + test8detail.failString = "This test has failed."; + test8detail.thinWalledExpectedBuildingArea = 6029.88; + test8detail.thinWalledAltBuildingArea = 5870.7183; + //ADD list to local testStrings List of Lists + ThinWalledAlternatives sp1_Aud_occ = new ThinWalledAlternatives("sp-1-Occupied_Auditorium", 6029.88, 90000); //enter expected thin walled values + test8detail.ThinWalledSpecs.Add(sp1_Aud_occ); + ThinWalledAlternatives sp2_Aud_unocc = new ThinWalledAlternatives("sp-2-Unoccupied_Auditorium",6000,120000); + test8detail.ThinWalledSpecs.Add(sp2_Aud_unocc); + ThinWalledAlternatives sp3_Roof = new ThinWalledAlternatives("sp-3-Roof_Void", 6000, 45000); + test8detail.ThinWalledSpecs.Add(sp3_Roof); + TestDetailList.Add(test8detail); + + //test 12 + test12detail.testName = "Test12"; + test12detail.shortTitle = "Simple box stacking test."; + test12detail.testSummary = "All above-grade zones, being tested to see if adjacency relationships are preserved. 
Zones with surfaces touching one another should have these surfaces defined as \"Interior\" types and the correct adjacency conditions."; + test12detail.passString = "This test has passed."; + test12detail.failString = "This test has failed."; + test12detail.thinWalledExpectedBuildingArea = 16791.7003; //this is already a thin walled test case, so the two are the same + test12detail.thinWalledAltBuildingArea = 16791.7003; + //ADD list to local testStrings List of Lists + ThinWalledAlternatives sp_0_0_12 = new ThinWalledAlternatives("Space_0_0", 6027.79, 138433.493); //remember has to be in feet2 and feet3 + test12detail.ThinWalledSpecs.Add(sp_0_0_12); + ThinWalledAlternatives sp_1_0_12 = new ThinWalledAlternatives("Space_1_0", 2152.78, 49440.533); + test12detail.ThinWalledSpecs.Add(sp_1_0_12); + ThinWalledAlternatives sp_2_0_12 = new ThinWalledAlternatives("Space_2_0", 2152.78, 49440.533); + test12detail.ThinWalledSpecs.Add(sp_2_0_12); + ThinWalledAlternatives sp_3_0_12 = new ThinWalledAlternatives("Space_3_0", 2152.78, 49440.533); + test12detail.ThinWalledSpecs.Add(sp_3_0_12); + ThinWalledAlternatives sp_4_0_12 = new ThinWalledAlternatives("Space_4_0", 4305.564, 98881.0668); + test12detail.ThinWalledSpecs.Add(sp_4_0_12); + TestDetailList.Add(test12detail); + + //test whole building 1 + testwholeBuild1detail.testName = "Whole Building Test 1"; + testwholeBuild1detail.shortTitle = "Test for multi-floor building with ceiling return plenum."; + testwholeBuild1detail.testSummary = "Ensures that the plenum horizontal surfaces are properly translated into interior surfaces, and have the proper adjacency conditions."; + testwholeBuild1detail.passString = "This test has passed."; + testwholeBuild1detail.failString = "This test has failed."; + //ADD list to local testStrings List of Lists + TestDetailList.Add(testwholeBuild1detail); + + //test whole building 2 + //testwholeBuild2detail.testName = "Whole Building Test 2"; + //testwholeBuild2detail.shortTitle = "Test for plenums and multi-zone objects over basement."; + //testwholeBuild2detail.testSummary = "Tests a simple building that is multiple stories and with plenums. This is a very standard building, typical of DOE prototype buildings, e.g. In addition to the features of Whole Building Test Case 1, this building also has underground surfaces. Ensures that the plenum horizontal surfaces are properly translated into interior surfaces, and have the proper adjacency conditions."; + //testwholeBuild2detail.passString = "This test has passed."; + //testwholeBuild2detail.failString = "This test has failed."; + //ADD list to local testStrings List of Lists + //TestDetailList.Add(testwholeBuild2detail); + + ////test 25 + //test25detail.testName = "Test25"; + //test25detail.shortTitle = "Stacked interior walls with openings"; + //test25detail.testSummary = "A simplified 4-zone model of a building that has interior walls stacked on top of one another. The interior walls each have openings cut into them, to simulate something that may be drawn as a hallway by a designer."; + //test25detail.passString = "This test has passed."; + //test25detail.failString = "This test has failed."; + ////ADD list to local testStrings List of Lists + //TestDetailList.Add(test25detail); + + ////test 28 + //test28detail.testName = "Test28"; + //test28detail.shortTitle = "Roof eaves are turned into shading devices automatically"; + //test28detail.testSummary = "A simplified 3-zone model of a building shaped like a residential home has been created. 
The home is a simple two story example that has a small attic formed by a roof with a 30 degree pitch which slopes along one of the site’s Cartesian axes. This test is a simple test that ensures the authoring tool is able to automatically break the roof into a space bounding object and shade object appropriately without any user intervention."; + //test28detail.passString = "This test has passed."; + //test28detail.failString = "This test has failed."; + ////ADD list to local testStrings List of Lists + //TestDetailList.Add(test28detail); + + + } + } + + public class ThinWalledAlternatives + { + public string SpaceName { get; set; } + public double FloorArea { get; set; } + public double Volume { get; set; } + + public ThinWalledAlternatives() + { + + } + + public ThinWalledAlternatives(string name,double area,double vol) + { + SpaceName = name; + FloorArea = area; + Volume = vol; + } + } + +} \ No newline at end of file diff --git a/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLValidator.cs b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLValidator.cs new file mode 100644 index 0000000..2712172 --- /dev/null +++ b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLValidator.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; +using System.Xml; +using System.IO; +using System.Xml.Schema; + +namespace DOEgbXML +{ + public class DOEgbXMLValidator + { + + public int nErrors = 0; + public int nWarnings = 0; + private string strErrorMsg = string.Empty; + public string Errors { get { return strErrorMsg; } } + public string BigError; + public string filePath { get; set; } + + public DOEgbXMLValidator() + { + filePath = "SupportFiles/XSD/GreenBuildingXML_Ver5.10.xsd"; + } + + public DOEgbXMLValidator(string filename) + { + filePath = "SupportFiles/XSD/" + filename; + } + + public bool IsValidXmlEx(XmlReader xmlStream) + { + bool bStatus = false; + try + { + // Declare local objects + // Improve to allow any schema version to be selected. + string xsdSchemaLocalLocation = Path.Combine(HttpRuntime.AppDomainAppPath, this.filePath); + XmlReaderSettings rs = new XmlReaderSettings(); + rs.ValidationType = ValidationType.Schema; + rs.ValidationFlags |= XmlSchemaValidationFlags.ProcessSchemaLocation | XmlSchemaValidationFlags.ReportValidationWarnings; + rs.ValidationEventHandler += new ValidationEventHandler(rs_ValidationEventHandler); + + //add schema + rs.Schemas.Add(null, xsdSchemaLocalLocation); + + + using (XmlReader xmlValidatingReader = XmlReader.Create(xmlStream, rs)) + { + while (xmlValidatingReader.Read()) + { + } + } + + ////Exception if error.s + if (nErrors > 0) + { + throw new Exception(strErrorMsg); + } + else { bStatus = true; }//Success + } + catch (Exception error) + { + BigError = "BIG ERROR: " + error + "
"; + bStatus = false; + } + + return bStatus; + } + + void rs_ValidationEventHandler(object sender, ValidationEventArgs e) + { + + if (e.Severity == XmlSeverityType.Warning) + { + strErrorMsg += "

" + "WARNING: " + e.Exception.Message + " Line Position " + e.Exception.LinePosition + " Line Number: " + e.Exception.LineNumber + "

"; + nWarnings++; + } + else if(!e.Exception.Message.Contains("The element cannot contain white space. Content model is empty.")) + { + + strErrorMsg += "

" + "ERROR: " + e.Exception.Message + " Line Position " + e.Exception.LinePosition + " Line Number: " + e.Exception.LineNumber + "

"; + nErrors++; + } + } + + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLenum.cs b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLenum.cs new file mode 100644 index 0000000..a790f92 --- /dev/null +++ b/XMLValidatorWeb/DOEgbXMLClass/DOEgbXMLenum.cs @@ -0,0 +1,43 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; + +namespace DOEgbXML +{ + public enum TestType + { + None, + Building_Area, + Space_Count, + Building_Story_Count, + Building_Story_Z_Height, + Building_Story_PolyLoop_RHR, + SpaceId_Match_Test, + Space_Area, + Space_Volume, + Total_Surface_Count, + Exterior_Wall_Surface_Count, + Underground_Surface_Count, + Interior_Wall_Surface_Count, + Interior_Floor_Surface_Count, + Roof_Surface_Count, + Shading_Surface_Count, + Air_Surface_Count, + Surface_Planar_Test, + Detailed_Surface_Checks, + Fixed_Windows_Count, + Operable_Windows_Count, + Fixed_Skylight_Count, + Operable_Skylight_Count, + Sliding_Doors_Count, + Non_Sliding_Doors_Count, + Air_Openings_Count, + Opening_Planar_Test, + Detailed_Opening_Checks, + Shell_Geom_RHR + } + public class DOEgbXMLenum + { + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/DOEgbXMLClass/Vector.cs b/XMLValidatorWeb/DOEgbXMLClass/Vector.cs new file mode 100644 index 0000000..4392ab3 --- /dev/null +++ b/XMLValidatorWeb/DOEgbXMLClass/Vector.cs @@ -0,0 +1,86 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace VectorMath +{ + public class Vector + { + public class CartCoord + { + public double X { get; set; } + public double Y { get; set; } + public double Z { get; set; } + } + + public class CartVect + { + public double X { get; set; } + public double Y { get; set; } + public double Z { get; set; } + } + + public static CartVect CreateVector(CartCoord cd1, CartCoord cd2) + { + CartVect vector = new CartVect(); + vector.X = cd2.X - cd1.X; + vector.Y = cd2.Y - cd1.Y; + vector.Z = cd2.Z - cd1.Z; + return vector; + } + + public static Double VectorMagnitude(CartVect vector) + { + double magnitude= 0.0; + + magnitude = Math.Sqrt(Math.Pow((vector.X),2) + Math.Pow((vector.Y),2) + Math.Pow((vector.Z),2)); + return magnitude; + } + + public static CartVect UnitVector(CartVect vector) + { + CartVect UV = new CartVect(); + double magnitude = VectorMagnitude(vector); + + UV.X = vector.X / magnitude; + UV.Y = vector.Y / magnitude; + UV.Z = vector.Z / magnitude; + return UV; + } + + public static CartVect CrossProduct(CartVect vector1, CartVect vector2) + { + CartVect xProd = new CartVect(); + + xProd.X = vector2.Z * vector1.Y - vector1.Z * vector2.Y; + xProd.Y = vector2.Z * vector1.X - vector1.Z * vector2.X; + xProd.Z = vector2.Y * vector1.X - vector1.Y * vector2.X; + return xProd; + } + + public double getPlanarSA(List polygonVect) + { + List normalizedPlane = new List(); + //the new plane's first coordinate is arbitrarily set to zero + normalizedPlane[0].X = 0; + normalizedPlane[0].Y = 0; + normalizedPlane[0].Z = 0; + double diffX = 0; + double diffY = 0; + double diffZ = 0; + + double surfaceArea = -1; + int numPoints = polygonVect.Count; + for(int i=0; i 0) + { + + } + } + return surfaceArea; + } + } +} diff --git a/XMLValidatorWeb/DOEgbXMLClass/gbXML2IDF.cs b/XMLValidatorWeb/DOEgbXMLClass/gbXML2IDF.cs new file mode 100644 index 0000000..958b9b5 --- /dev/null +++ b/XMLValidatorWeb/DOEgbXMLClass/gbXML2IDF.cs @@ -0,0 +1,39 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; +using System.Xml; 
+using System.IO; + +namespace XMLValidatorWeb.DOEgbXMLClass +{ + public class gbXML2IDF + { + public void gbXMLToIDF(string filelocation) + { + XmlDocument gbxf = new XmlDocument(); + + //getXMLFile + gbxf.Load(filelocation); + XmlNamespaceManager gbxmlns = new XmlNamespaceManager(gbxf.NameTable); + gbxmlns.AddNamespace("gbXMLv5", "http://www.gbxml.org/schema"); + + //make zone file + string zonestring = makeZones(gbxf, gbxmlns); + + } + + public string makeZones(XmlDocument gbxf,XmlNamespaceManager gbxmlns) + { + string zones = ""; + XmlNodeList nodes = gbxf.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space", gbxmlns); + int nodecount = nodes.Count; + foreach (XmlNode node in nodes) + { + + } + + return zones; + } + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/DOEgbXMLClass/gbXMLMatches.cs b/XMLValidatorWeb/DOEgbXMLClass/gbXMLMatches.cs new file mode 100644 index 0000000..93f4da8 --- /dev/null +++ b/XMLValidatorWeb/DOEgbXMLClass/gbXMLMatches.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; + +namespace DOEgbXML +{ + public class gbXMLMatches + { + public Dictionary> MatchedSurfaceIds; + public Dictionary> MatchedOpeningIds; + + public void Init() + { + MatchedSurfaceIds = new Dictionary>(); + MatchedOpeningIds = new Dictionary>(); + } + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/Default.aspx b/XMLValidatorWeb/Default.aspx new file mode 100644 index 0000000..4d0cf43 --- /dev/null +++ b/XMLValidatorWeb/Default.aspx @@ -0,0 +1,10 @@ +<%@ Page Language="C#" MasterPageFile="~/MasterPage.Master" AutoEventWireup="true" + CodeBehind="Default.aspx.cs" Inherits="XMLXSDValidator._Default" %> + + +
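The makeZones method in gbXML2IDF above selects every gbXML Space node via the gbXMLv5 namespace manager but leaves its per-space loop empty. A minimal sketch of what that loop might produce, assuming each Space carries an id attribute to reuse as the zone name and letting the remaining IDF Zone fields take their defaults; the class name and output layout are illustrative only, not the tool's actual IDF writer:

using System.Text;
using System.Xml;

namespace XMLValidatorWeb.DOEgbXMLClass
{
    // Hypothetical sketch: emit one abbreviated IDF Zone object per gbXML Space.
    public static class MakeZonesSketch
    {
        public static string MakeZones(XmlDocument gbxf, XmlNamespaceManager gbxmlns)
        {
            StringBuilder zones = new StringBuilder();
            XmlNodeList nodes = gbxf.SelectNodes(
                "/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space", gbxmlns);

            foreach (XmlNode node in nodes)
            {
                // Use the Space id attribute as the zone name; skip spaces without one.
                XmlAttribute id = node.Attributes["id"];
                if (id == null) continue;

                zones.AppendLine("Zone,");
                zones.AppendLine("    " + id.Value + ";    !- Name (remaining fields left at defaults)");
                zones.AppendLine();
            }
            return zones.ToString();
        }
    }
}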
+

+ Test Page +

+
+
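The two dictionaries in gbXMLMatches above appear without their generic type parameters. A plausible reading of the declarations, assuming each surface or opening id from the file under test maps to the list of ids it matched in the standard file (the mapping direction is an assumption, not confirmed by the patch), is sketched below:

using System.Collections.Generic;

namespace DOEgbXML
{
    public class gbXMLMatchesSketch
    {
        // Presumed shape: id in the submitted file -> matching id(s) in the standard file.
        public Dictionary<string, List<string>> MatchedSurfaceIds;
        public Dictionary<string, List<string>> MatchedOpeningIds;

        public void Init()
        {
            MatchedSurfaceIds = new Dictionary<string, List<string>>();
            MatchedOpeningIds = new Dictionary<string, List<string>>();
        }
    }
}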
diff --git a/XMLValidatorWeb/Default.aspx.cs b/XMLValidatorWeb/Default.aspx.cs new file mode 100644 index 0000000..10b620f --- /dev/null +++ b/XMLValidatorWeb/Default.aspx.cs @@ -0,0 +1,22 @@ +using System; +using System.Collections.Generic; +using System.Web; +using System.Web.UI; +using System.Web.UI.WebControls; +using System.Xml; +using System.Xml.Schema; +using System.IO; + +namespace XMLXSDValidator +{ + public partial class _Default : System.Web.UI.Page + { + + + + protected void LoginView1_ViewChanged1(object sender, EventArgs e) + { + + } + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/Default.aspx.designer.cs b/XMLValidatorWeb/Default.aspx.designer.cs new file mode 100644 index 0000000..ac28f88 --- /dev/null +++ b/XMLValidatorWeb/Default.aspx.designer.cs @@ -0,0 +1,24 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by a tool. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + +namespace XMLXSDValidator { + + + public partial class _Default { + + /// + /// HyperLink1 control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.HyperLink HyperLink1; + } +} diff --git a/XMLValidatorWeb/GreenBuildingXML_Ver5.10.xsd b/XMLValidatorWeb/GreenBuildingXML_Ver5.10.xsd new file mode 100644 index 0000000..737de2c --- /dev/null +++ b/XMLValidatorWeb/GreenBuildingXML_Ver5.10.xsd @@ -0,0 +1,6478 @@ + + + + + gbXML version 5.10 - maintained by gbXML.org. + + + Change Log: + +5.10 + 10/2012 + +1. Added new element: ProjectEntity under ProgramInfo and it contains three new elements Filename, URI, and GUID. +2. Added new element: Filename for storing filenames of project files that created or modified the gbXML file. +3. Added new element: URI for storing location of filename or project that created or modified the gbXML file. +4. Added new element: GUID for storing unique ids for models, projects, simulations, and files that created or modified the gbXML file. GUID also has entity attribute that uses entityType enumeration. +5. Added new enumeration: entityType which has four values, Model, File, Project, Run. +6. Added programId attribute to the following elements; LightPowerPerArea, EquipPowerPerArea, InfiltrationFlow, PeopleNumber, PeopleHeatGain, Temperature, PeakDomesticHotWaterFlow, Weather, Schedule, IntEquip, ExtEquip, OAFlowPerPerson, OAFlowPerArea, DesignHeatT, DesignCoolT, Construction, HydronicLoop, HydronicLoopEquipment, AirLoop, AirLoopEquipment, LightingSystem, LightingControl, Lighting, WindowType, Vegetation, Weather, Meter, Transportation, Zone, ShadeControl, AltEnergySource, FlowPerArea, FlowPerPerson, and Flow. +7. Added Lux and Phot to illuminanceUnitEnum. +8. Added Damper, Boiler, Chiller to controlTypeEnum. +9. Added unit attribute using temperatureUnitEnum to DesignTemp, MinTemp, and MaxTemp. +10. Added unit attribute using flowUnitEnum to DesignFlow, MaximumFlow, and MinimumFlow. +11. Added LPM to flowUnitEnum. +12. Added primaryLoopId attribute to HydronicLoop. +13. Added Location element to Weather element. +14. Remove City, Country, StateorProvince elements from Weather element as Location element superceeds it. +15. Added Modeled to stationTypeEnumeration. +16. Added DataCenter to buildingTypeEnum. +17. 
Added ServerRoom to spaceTypeEnum. +18. Remove CADModelId from CreatedBy element as ProgramInfo and ProjectEntites superceeds it. +19. Changed minOccurs value to 4 for Surface sub-element of Campus main element +20. Changed minOccurs value to 0 for Surface main element + +5.01 + 4/2012 + +1. Deleted "surfaceNormalConvention" element. No longer used. +2. Deleted "surfaceNormalEnum" enumeration. No longer used. + +5.00 + 10/2011 + +1. Added new element: YearModeled. It consists of BeginDate, EndDate, and HolidaysModeled child elements. +2. Added new element: HolidaysModeled. It consists of Name, Description, and HolidayDate child elements. +3. Added new element: HolidayDate (type: date) +4. Added YearModeled child element to Campus element +5. Added 5 new enumeration items to the extEquipTypeEnum enumeration (Types of exterior equipment): +a. WindTurbine +b. Photovoltaic +c. CogenDiesel +d. CogenFuelCell +e. CogenGasTurbine +6. Added new element: HeatRejectedtoSpace with unitlessUnitEnum attribute +7. Added new element: Weight with attribute weightUnitEnum +8. Added Weight, HeatRejectedtoSpace child elements to IntEquip element +9. Added Weight child element to ExtEquip, airLoopEquip, HydronicEquip elements +10 Added new element: HeatRejectedtoSpace (type: decimal). Also added annotation explaining this element +11. Added HeatRejectedtoSpace child element to intEquip +12. Added Exhaust item to enumeration systemTypeEnum +13. Added new enumeration: resetTemperatureTypeEnum with 3 items: None, Automatic, OutdoorAirBased +14. Added new element: ResetTemperature with attributes: temperatureUnitEnum, resetTemperatureTypeEnum +15. Added ResetTemperature child element to AirLoopEquipment element +16. Updated the annotation of the AirLoopEquipment element to explain how they should be ordered in an AirLoop +17. Added new element: MotorInAirstream (type: boolean) +18. Added MotorInAirstream child element to AirLoopEquipment element +19. Added new element: Enthalpy with attribute enthalpyUnitEnum +20. Added new enumeration: enthalpyUnitEnum with 2 items: BTUPerLb and KJPerKg +21. Added Enthalpy child element to AirLoopEquipment element +22. Added 2 enumeration items to equipmentTypeEnum: EvaporativePreCooler, PreHeatCoil +23. Added 2 enumeration items to efficiencyTypeEnum: BTUPerHourPerF, kWPerC. Also, added annotation saying that this applies to StandByLoss for water heaters +24. Updated SolarHeatGainCoeff child element of Opening element so that it is unbounded (i.e. - many SHGC values for an Opening) +25. Updated AdjacentSpaceId annotation to better explain its use +26. Updated PolyLoop annotation to better explain its use +27. Added ProductName, Version, Platform elements (All of type: string) +28. Added AirChangesPerHour child element to Space element +29. Updated ShellGeometry annotation with clarification +30. Updated SpaceBoundary annotation with clarification +31. Added oppositeIdRef attribute (type: IDREF) to SpaceBoundary +32. Added SpaceBoundary child element to Building element along with annotation +33. Added new element: FamilyName (type: string) +34. Added FamilyName child element to Surface +35. Added new elements: Address1, Address2, City, State, ZipCode, Country, PhoneNumber (all of type: string) +36. Added Address1, Address2, City, State, ZipCode, Country, PhoneNumber child elements to PersonInfo element +37. Added ExtEquipId child element to both Construction and WindowType elements for purposes of modeling BIPV. +38. 
Added 2 enumeration items to powerTypeEnum: Diesel, Biogas +39. Added 1 enumeration item to powerUseTypeEnum: ElectricityGeneration +40. Added 2 enumeration items to tempTypeEnum: HighTempLockout, LowTempLockout +41. Added new enumeration: windSpeedEnum with 3 items: RatedWindSpeed, CutInWindSpeed, CutOutWindSpeed +42. Added new element: WindSpeed with attributes velocityUnitEnum, windSpeedEnum +43. Added WindSpeed child element to ExtEquip element +44. Added ResetTemperature child element to HydronicLoopEquipment element +45. Updated Equation element documentation to clarify dependent vs. independent variables +45. Updated IndependentVariable child element of Equation element to allow for infinite number of independent variables +46. Updated all instances of text "Dependent" and "Independent". It was misspelled. +47. Updated Performance element documentation to clarify that it can represent all types of performance metrics + +0.37 + 12/08 + Added version attribute to gbXML element. + Added the ability to have additional SolarHeatGainCoeff values for Openings and WindowTypes for different angles of incidence defined by a new attribute in the SolarHeatGainCoeff element. + Added the Photometry element to the LightingSystem element. + Added the CADModelID to the CreatedBy element to track the CAD/BIM model the gbXML was created from. + Added CoefficientOfUtilization element to Lighting element. + Added PhotometryOrientation element to Lighting element. + Added Radiator to hydronicEquipTypeEnum. + Added 25 additional systemTypeEnums. +0.36 + 02/08 + Corrected buildingStoreyIdRef attribute in Space element. + Added HydronicLoopEquipmentId element to Space element. + Added airChangesSchedIdRef attribute to Zone element. + Add constraint to Surface element to have a minimum of four defined. + Corrected spelling error in PercentAreaDaylitControlled element in LightingControl element. + Added the following elements to allow USDA-FS to conduct urban/wildland interface fire modeling: BiomassDensity element to Vegetation element, WFTDTemp, WFTDRelHumidity, WFTDWindSpeed, WFTDWindDir, and WFTDPressure to Weather element, Fire to Material element and FireFace to Construction element. + Changed DocumentHistory/CreatedBy and DocumentHistory/ProgramInfo elements to be required. +0.35 + 8/06 + Corrected spelling error in capacityTypeEnum. + Added SingleFamily to buildingTypeEnum. + Replaced sequence requirement with unbound choice. +0.34 + 8/19/03 + Convert spaceTypeEnum facets to Pascal case and added DiningCafeteriaFastFood to buildingTypeEnum. Ensured all enumeration facets are in Pascal case. + 6/25/03 + Added CartesianPoint element to Results element. + Set minNumber for ZipcodeOrPostalCode element under the Location element to 1. + Set Campus element id attribute use to required. + Set Building element id attribute use to required. + Corrected Surface element id attribute use to required. 
+0.33 + 1/15/02 + Added PolyLoop element to RectangularGeometry for describing polygons + Changed CartesianPoint element to allow only two Coordinate elements for describing flat geometry + Added PeakDomesticHotWaterFlow element to Building + Created PeakDomesticHotWaterFlow element + Moved simple type elements into complex type elements - easier to find correct element + Added enumeration to conditionTypeEnum + 1/14/02 + Made HydronicLoopId repeatable in AirLoopEquipment and HydronicLoopEquipment + Added Name and Description as optional elements to the Results element + 1/10/02 + Created heatGainTypeEnum enumeration and assigned it to the PeopleHeatGain element + Created PeopleHeatGain element and added it to the Space element + Removed TotalPeopleHeatGain and PeopleLatentHeatGain elements + Added systemType attribute to the AirLoop element, and added systemTypeEnum enumeration + Removed Lighting element from gbXML root element + Replaced LightId in Campus and Building elements with Lighting elements that refrence LightingSystem elements directly + Changed InfiltrationFlow/@type to optional + Added an optional HydronicLoop/@primaryLoopId attribute +0.32 + 12/12/01 + Changed xsd:any namespace="##any" to ##other for GeneralGeometry and Meter elements + 12/10/01 + Removed gbXML/CreatorPersonInfo and gbXML/CreatorCadInfo + Created Surface/RectangularGeometry and Opening/RectangularGeometry and their children + Created gbXML/DocumentHistory and its children + Updated schema to version http://www.w3.org/2001/XMLSchema + Changed enumerations for spaceTypeEnum to remove spaces and slashes + Changed Results/@timeIncrement to type xsd:duration as required for the 2001 schema specification +0.31 + 8/1/01 + Added HydronicLoopId to AirLoopEquipment element + 7/26/01 + Added Control element to equipment elements + Added FlowControl element to HydronicLoop element + Added PressureControl element to AirLoop element + Added TemperatureControl element to both loop elements + Added enumerations to tempTypeEnum + 7/25/01 + Added xmlns to schema element + Added conditioningTypeEnum + Changed enumerations for efficiencyTypeEnum + Changed enumerations for resourceTypeEnum + Added stadardsTypeEnum + Added DependentValue element + Added IndependentValue element + Removed XValue element + Removed YValue element + Added RefrigerantType to AirLoopEquipment element + Changed structure of Data element + Added attributes to DeltaP element + Added conditionType attribute to DeltaT element + Made Name and Description elements optional in the DependentVariable element + Added minValue and maxValue attributes to the DependentVariable element + Changed structure of the Efficiency element + Allowed up to 2 IndependentVariable elements in the Equation element + Added fluidType attribute to the HydronicLoop element + Changed structure of HydronicLoopEquipment element + Made Name in IndependentVariable element optional + Changed structure of MinFlow element + Added id attribute to the Performance element + Changed structure of PointData element + 7/9/01 + Added hydronicLoopType to HydronicLoopId + Added HydronicLoopId to HydronicLoopEquipment + 7/3/01 + Added ShadeControl to Opening + Added ShadeSchedule, SolarOnOpening, HorizontalSolar, AirTempature, ZoneCoolingLoad, and Glare to ShadeControl + Added attributes, enumerations, and documentation for these new elements + Removed frameIdRef, blindIdRef, GlazeId, GapId from Opening - they are redundant and already contained in WindowType +0.30 + 6/28/01 + Changed Results element + 
Removed MaterialType - it did not add information +0.29 + 4/30/01 + Added StreetAddress element to Building - removed Location. + Changed SIResults element to useSIUnitsForResults. + Made several attributes at the gbXML level required instead of defaulting to SI. + Campus/@buildingType removed - Building/@buildingType required. + Changed SpaceId element to AdjacentSpaceId. + Moved CADObjectId after PlanarGeometry in the Opening element. + Made PlanarGeometry/@id optional. + Removed shadeTypeEnum (no longer used). + 4/27/01 + Added annotation to several elements and attributes. + Added Enum to the end of enumerations to clarify distinction between 'type', meaning simple type (used for enumerations) and 'type', used in classifying an object (example: buildingType, surfaceType). +0.28 + 4/12/01 + Added uValueUnit. + Made exposedToSun default = true. + 4/10/01 + Changed sequence of gbXML, Campus, Building, Surface, and Location children. + Added more of a description for Location. + Made Building/@id, Surface/@id optional. + Moved Surface to be child of Campus. + Added enumerations to conditionType. + Changed ShadingSurface element to ExposedToSun boolean attribute. + Renamed SpaceRef to SpaceId, @spaceRef to @spaceIdRef + Removed Name and Description from PlanarGeometry. + Added explicit right-hand-rule documentation to the PolyLoop element. + Constrained PolyLoop to at least 3 CartesianPoints. + Constrained CartesianPoint to exactly 3 Coordinates. + Added @lengthUnit, @areaUnit, @volumeUnit, and @temperatureUnit to the gbXML element. + Changed documentation for Latitude and Longitude. + 4/6/01 + Added targetNamespace. + Removed gbXML/@version. + Made gbXML/@id optional. + Removed GeoPraxisEAM from gbXML/@engine. + Renamed gbXML/SI to gbXML/SIResults and added documentation. + Removed @unit from Latitude and Longitude and made simple type - must be in decimal degrees. + Added global enumerations to be called out elsewhere. +0.27 + 2/13/01 + Removed buildingType attribute from Space. + Added spaceType attribute to Space. + Changed FloorArea to Area. + Changed CADObjectIdRef attribute to CADObjectId element to allow for multiples. + Changed to surface-centric model. + Changed Geometry to PlanarGeometry, ShellGeometry, and GeneralGeometry + 2/12/01 + Changing from X3D elements to ifcXML elements. + Removed X3D element. + Placed in alphabetical order. + Seperated complex types from simple type elements. + Changed back to sequence type - all had unusable restrictions. +0.26 + 2/7/01 + Replaced sequence types with type all. + Removed empty sequence elements. + + + + ACCA has given permission to gbxml authors to use this trademark ACCA and Manual J in the gbXML schema. Users of the gbXML schema are restricted from using the ACCA trademarks for any other purposes without express permission from ACCA. + + + "ASHRAE" is a registered service mark of the American Society of Heating, Refrigerating and Air-Conditioning Engineers, Inc. All rights reserved. + + + Enumerations + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Use to specify the most predominant building use type. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Enumeration value to be used when there is no building type information available. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Coefficient of Performance + + + + + Energy Efficiency Ratio + + + + + Seasonal Energy Efficiency Ratio + + + + + Annual Fuel Utilization Efficiency + + + + + Heating Seasonal Performance Factor + + + + + + + Thermal Efficiency + + + + + Motor Efficiency + + + + + Fan Efficiency + + + + + Mechanical Efficiency + + + + + Boiler Efficiency + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Used with the floorCategory attribute. + + + + + + + + + + + + + + + + + + + + + + + + + + + Used with the floorSlabPerimeterHeatLossCoefficientUnitType attribute. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Used with the hOutsideUnitType attribute. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Opening in a surface with a tilt between 45° and 149.99° with a non-operable glazed opening in it. + + + + + Opening in a surface with a tilt between 45° and 149.99° with a operable glazed opening in it. + + + + + Opening in a surface with a tilt between 0° and 44.99° with a non-operable glazed opening in it. + + + + + Opening in a surface with a tilt between 0° and 44.99° with a operable glazed opening in it. + + + + + Opening in a surface with a tilt between 45° and 149.99° with a sliding door in it. + + + + + Opening in a surface with a tilt between 45° and 149.99° with a non-sliding door in it. + + + + + Opening in a surface with a tilt between 0° and 180° with nothing in it. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Used with the roofASHRAENumber attribute. Each annotation for the values is only one example of many possible constructions for that ASHRAE roof number. The user should select the one that best matches the thermal characteristics of the roof, not necessarily the one closest to the example description. 
+ + + + + Undefined + + + + + Steel sheet with 1 or 2 inches insulation + + + + + One inch wood with 1 inch insulation + + + + + Four inch light weight concrete + + + + + Two inch heavy weight concrete + + + + + One inch wood with 2 inch insulation + + + + + Six inch light weight concrete + + + + + 2.5 inch wood with 1 inch insulation + + + + + Eight inch light weight concrete + + + + + 4 inch hvy wt concrete with 1-2 inch insulation + + + + + 2.5 inch wood with 2 inches of insulation + + + + + Roof Terrace System + + + + + 6 inch hvy wt concrete with 1-2 inch insulation + + + + + 4 inch wood with 1-2 inches of insulation + + + + + + + Used with the roofCLTDIndex attribute. + + + + + Under attic or knee wall, U-value = 0.408 - 0.018, Unvented Attic, No Radiant Barrier, Any Roofing Material, Any Roof Color + + + + + Under attic or knee wall, U-value = 0.408 - 0.018, Vented Attic, No Radiant Barrier, Dark Asphalt Shingles or Dark Metal, Tar and Gravel or Membrane + + + + + Under attic or knee wall, U-value = 0.408 - 0.018, Vented Attic, No Radiant Barrier, White or Light Color Shingles, Any Wood Shake Light Metal, Tar and Gravel or Membrane + + + + + Under attic or knee wall, U-value = 0.408 - 0.02, Vented Attic, No Radiant Barrier, Dark Tile, Slate or Concrete + + + + + Under attic or knee wall, U-value = 0.408 - 0.02, Vented Attic, No Radiant Barrier, Light Tile, Slate or Concrete + + + + + Under attic or knee wall, U-value = 0.408 - 0.02, Vented Attic, No Radiant Barrier, White Tile, Slate or Concrete, White Metal or White Membrane + + + + + On exposed beams, U-value = 0.287 - 0.134, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + On exposed beams, U-value = 0.118 - 0.091, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + On exposed beams, U-value = 0.074 - 0.063, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + On exposed beams, U-value = 0.054, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + On exposed beams, U-value = 0.048, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + On exposed beams, U-value = 0.043 - 0.039, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + On exposed beams, U-value = 0.035, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + On exposed beams, U-value = 0.032 - 0.027, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + On exposed beams, U-value = 0.287 - 0.134, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + On exposed beams, U-value = 0.118 - 0.091, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + On exposed beams, U-value = 0.074 - 0.063, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + On exposed beams, U-value = 0.054, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + On exposed beams, U-value 
= 0.048, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + On exposed beams, U-value = 0.043 - 0.039, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + On exposed beams, U-value = 0.035, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + On exposed beams, U-value = 0.032 - 0.027, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + On exposed beams, U-value = 0.287 - 0.134, White or Light Color Tile, Slate or Concrete, White Metal, White Membrane + + + + + On exposed beams, U-value = 0.118 - 0.091, White or Light Color Tile, Slate or Concrete, White Metal, White Membrane + + + + + On exposed beams, U-value = 0.074 - 0.063, White or Light Color Tile, Slate or Concrete, White Metal, White Membrane + + + + + On exposed beams, U-value = 0.054, White or Light Color Tile Slate or Concrete, White Metal, White Membrane + + + + + On exposed beams, U-value = 0.048, White or Light Color Tile Slate or Concrete, White Metal, White Membrane + + + + + On exposed beams, U-value = 0.043 - 0.039, White or Light Color Tile, Slate or Concrete, White Metal, White Membrane + + + + + On exposed beams, U-value = 0.035, White or Light Color Tile Slate or Concrete, White Metal, White Membrane + + + + + On exposed beams, U-value = 0.032 - 0.027, White or Light Color Tile, Slate or Concrete, White Metal, White Membrane + + + + + Below roof joists, U-value = 0.241 - 0.109, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + Below roof joists, U-value = 0.09 - 0.076, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + Below roof joists, U-value = 0.069, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + Below roof joists, U-value = 0.051 - 0.047, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + Below roof joists, U-value = 0.034, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + Below roof joists, U-value = 0.029, Dark or Bold-Color Asphalt Shingle, Dark Metal, Dark Membrane, Dark Tar and Gravel + + + + + Below roof joists, U-value = 0.241 - 0.109, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + Below roof joists, U-value = 0.09 - 0.076, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + Below roof joists, U-value = 0.069, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + Below roof joists, U-value = 0.051 - 0.047, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + Below 
roof joists, U-value = 0.034, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + Below roof joists, U-value = 0.029, White or Light Color Asphalt Shingle, Any Wood Shake, Dark or Medium Color Tile, Slate or Concrete, Light or Unpainted Metal, Light or Silver Membrane, Light Tar and Gravel + + + + + Below roof joists, U-value = 0.241 - 0.109, White or Light Color Tile, Slate or Concrete, White Metal, White Membrane + + + + + Below roof joists, U-value = 0.09 - 0.076, White or Light Color Tile, Slate or Concrete, White Metal, White Membrane + + + + + Below roof joists, U-value = 0.069, White or Light Color Tile, Slate or Concrete, White Metal, White Membrane + + + + + Below roof joists, U-value = 0.051 - 0.034, White or Light Color Tile, Slate or Concrete, White Metal, White Membrane + + + + + Below roof joists, U-value = 0.029, White or Light Color Tile, Slate or Concrete, White Metal, White Membrane + + + + + + + Used with the roofColor attribute. Only enter Light if the construction is a roof and if it's in a rural location. + + + + + Dark colored or light colored in an industrial area + + + + + Permanently light colored and in a rural area + + + + + + + Used with the roofCTSType attribute. Each annotation for the values is only one example of many possible constructions for that CTS type. The user should select the one that best matches the thermal characteristics of the roof, not necessarily the one closest to the example description. + + + + + Metal Roof, R-19 Batt Insulation, Gyp Board + + + + + Metal Roof, R-19 Batt Insulation, Suspended Acoustical Ceiling + + + + + Metal Roof, R-19 Batt Insulation + + + + + Asphalt Shingles, Wood Sheathing, R-19 Batt Insulation, Gyp Board + + + + + Slate or Tile, Wood Sheathing, R-19 Batt Insulation, Gyp Board + + + + + Wood Shingles, Wood Sheathing, R-19 Batt Insulation, Gyp Board + + + + + Membrane, Sheathing, R-10 Insulation Board, Wood Deck + + + + + Membrane, Sheathing, R-10 Insulation Board, Wood Deck Suspended Acoustical Ceiling + + + + + Membrane, Sheathing, R-10 Insulation Board, Metal Deck + + + + + Membrane, Sheathing, R-10 Insulation Board, Metal Deck Suspended Acoustical Ceiling + + + + + Membrane, Sheathing, R-15 Insulation Board, Metal Deck + + + + + Membrane, Sheathing, R-10 plus R-15 Insulation Boards, Metal Deck + + + + + 2" Concrete Roof Ballast, Membrane, Sheathing, R-15 Insulation Board, Metal Deck + + + + + Membrane, Sheathing, R-15 Insulation Board, 4" LW Concrete + + + + + Membrane, Sheathing, R-15 Insulation Board, 6" LW Concrete + + + + + Membrane, Sheathing, R-15 Insulation Board, 8" LW Concrete + + + + + Membrane, Sheathing, R-15 Insulation Board, 6" HW Concrete + + + + + Membrane, Sheathing, R-15 Insulation Board, 8" HW Concrete + + + + + Membrane, 6" HW Concrete, R-19 Batt Insulation, Suspended Acoustical Ceiling + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Department of Energy + + + + + Air-Conditioning and Refrigeration Institute + + + + + American National Standards Institute + + + + + National Electrical Manufacturers Association + + + + + 
+ + + + A five-digit station identifier assigned by the World Meteorological Organization (WMO), used for international weather data exchange and station documentation. + + + + + Weather-Bureau-Army-Navy station id. A five-digit station identifier used at National Climatic Data Center (NCDC) for digital data storage and general station identification purposes. + + + + + A 4-letter indicator for geographical locations throughout the world, managed by the International Civil Aviation Organization (ICAO). + + + + + A 3- to 5-character alpha-numeric identifier assigned by the US National Weather Service (NWS), used for site identification in the NWS communications systems (e.g. AFOS, ROSA, SHEF, etc.). + + + + + An alpha-numeric identifier of up to 4-characters managed by the USDT Federal Aviation Administration (FAA), used for site identification of airports, weather stations and other sites vital to navigation. + + + + + A 6-digit number identifying a station in the NWS Cooperative Network. + + + + + A custom code identifying a virtual station for a particular grid location on the planet that was modeled by a simulation program such as MM5, WRF, or other weather simulation programs. + + + + + + + + + Surface adjacent to two conditioned or unconditioned spaces with a tilt between 45° and 149.99°. + + + + + Surface adjacent to one conditioned or unconditioned space and the outside with a tilt between 45° and 149.99°. + + + + + Surface adjacent to one conditioned or unconditioned space and the outside with a tilt between 0° and 44.99°. + + + + + Surface adjacent to two conditioned or unconditioned spaces with a tilt between 150° and 180°. + + + + + Surface not adjacent to any spaces with tilt between 0° and 180°. + + + + + Surface adjacent to one conditioned or unconditioned space and earth (soil) with a tilt between 45° and 149.99°. + + + + + Surface adjacent to one conditioned or unconditioned space and earth (soil) below grade with a tilt between 150° and 180°. + + + + + Surface adjacent to two conditioned or unconditioned spaces with a tilt between 0° and 44.99°. + + + + + Air membrane between two conditioned or unconditioned spaces with a tilt between 0° and 180°. + + + + + Surface adjacent to one conditioned or unconditioned space and earth (soil) with a tilt between 0° and 44.99°. + + + + + Surface adjacent to one conditioned or unconditioned space and the outside with a tilt between 150° and 180°. + + + + + Surface adjacent to one conditioned or unconditioned space and earth (soil) at grade with a tilt between 150° and 180°. + + + + + Freestanding column in space specified by its top surface having the same ifcGUID. + + + + + Embedded column in wall specified by a surface which is coplanar with (one of) the embedding wall(s). 
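The surface-type annotations above partition surfaces by how many spaces they touch and by tilt band (0–44.99°, 45–149.99°, 150–180°). The short sketch below is illustrative only and is not part of the schema or the validator: the class and band labels are descriptive placeholders, not the literal enumeration values, but the tilt thresholds match the annotations.

using System;

static class SurfaceTiltExample
{
    // Tilt is measured in degrees from "up" (0 = facing up, 180 = facing down),
    // matching the ranges quoted in the surface-type annotations above.
    public static string TiltBand(double tiltDegrees)
    {
        if (tiltDegrees < 0 || tiltDegrees > 180)
            throw new ArgumentOutOfRangeException(nameof(tiltDegrees));
        if (tiltDegrees <= 44.99) return "roof/slab-like (0 to 44.99 deg)";
        if (tiltDegrees <= 149.99) return "wall-like (45 to 149.99 deg)";
        return "floor/ceiling-like (150 to 180 deg)";
    }

    static void Main()
    {
        Console.WriteLine(TiltBand(10));   // roof/slab-like
        Console.WriteLine(TiltBand(90));   // wall-like
        Console.WriteLine(TiltBand(180));  // floor/ceiling-like
    }
}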
+ + + + + + + + + Variable Temperature + + + + + Packaged Single-zone + + + + + Ceiling Induction + + + + + Constant-volume Reheat Fan + + + + + Variable-volume Fan + + + + + Powered Induction Unit + + + + + Packaged Variable Air Volume + + + + + Packaged Variable-Volume Variable-Temperature + + + + + Ceiling Bypass + + + + + Evaporative Cooling + + + + + Multizone Fan + + + + + Packaged Multizone + + + + + Dual-duct Fan + + + + + Fan Coil + + + + + Induction Unit + + + + + Packaged Terminal Air Conditioner + + + + + Water Loop Heat Pump + + + + + Residential with cycling furnace and cycling air-conditioner + + + + + Residential Variable-Volume Variable-Temperature + + + + + Floor Panel Heating + + + + + Heating and Ventilating + + + + + Unit Heater + + + + + Unit Ventilator + + + + + Central Heating Radiators + + + + + Central Heating Convectors + + + + + Central Heating Radiant Floor + + + + + Central Heating Hot Air + + + + + Other Room Heater + + + + + Radiant Heater No Flue + + + + + Radiant Heater Flue + + + + + Radiant Heater Multiburner + + + + + Forced Convection Heater Flue + + + + + Forced Convection Heater No Flue + + + + + VAV Single Duct + + + + + VAV Dual Duct + + + + + VAV Reheat Fan + + + + + VAV Indoor Packaged Cabinet + + + + + Constant Volume Fixed OA + + + + + Constant Volume Variable OA + + + + + Constant Volume Terminal Reheat + + + + + Multizone Hot Deck Cold Deck + + + + + Constant Volume Dual Duct + + + + + Radiant Cooled Ceilings + + + + + Active Chilled Beams + + + + + Variable Refrigerant Flow + + + + + Split Systems With Natural Ventilation + + + + + Split Systems With Mechanical Ventilation + + + + + Split Systems With Mechanical Ventilation With Cooling + + + + + Exhaust Air Systems + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Used with the wallASHRAEGroup attribute. Each annotation for the values is only one example of many possible constructions for that ASHRAE wall group. The user should select the one that best matches the thermal characteristics of the wall, not necessarily the one closest to the example description. + + + + + 4-in. face brick, insulation + 8-in. com. brick + + + + + 4-in. face brick, 2-in. insul., 4-in. com. brick + + + + + 4-in. face brick, air space + 4-in. face brick + + + + + 4-in. face brick + 4-in. common brick + + + + + 4-in. face brick + 4-in. block + + + + + 4-in. block + air space or insulation + + + + + Metal curtain wall + 1 to 3-in. insulation + + + + + + + Used with the wallColor attribute. Only enter Light or Medium if the construction is a wall and if it's in a rural location. + + + + + Dark colored or light colored in an industrial area + + + + + Permanently medium colored and in a rural area + + + + + Permanently light colored and in a rural area + + + + + + + Used with the wallCTSType attribute. Each annotation for the values is only one example of many possible constructions for that CTS type. The user should select the one that best matches the thermal characteristics of the wall, not necessarily the one closest to the example description. 
+ + + + + Spandrel Glass, R-10 Insulation Board, Gyp Board + + + + + Metal Wall Panel, R-10 Insulation Board, Gyp Board + + + + + 1" Stone, R-10 Insulation Board, Gyp Board + + + + + Metal Wall Panel, Sheathing, R-11 Batt Insulation, Gyp Board + + + + + 1" Stone, Sheathing, R-11 Batt Insulation, Gyp Board + + + + + Wood Siding, Sheathing, R-11 Batt Insulation, 1/2" + Wood + + + + + 1" Stucco, Sheathing, R-11 Batt Insulation, Gyp Board + + + + + EIFS Finish, R-5 Insulation Board, Sheathing, Gyp Board + + + + + EIFS Finish, R-5 Insulation Board, Sheathing, R-11 Batt Insulation, Gyp Board + + + + + EIFS Finish, R-5 Insulation Board, Sheathing, 8" LW CMU, Gyp Board + + + + + Brick, R-5 Insulation Board, Sheathing, Gyp Board + + + + + Brick, Sheathing, R-11 Batt Insulation, Gyp Board + + + + + Brick, R-5 Insulation Board, Sheathing, R-11 Batt Insulation Gyp Board + + + + + Brick, R-5 Insulation Board, 8" LW CMU + + + + + Brick, 8" LW CMU, R-11 Batt Insulation, Gyp Board + + + + + Brick, R-5 Insulation Board, 8" HW CMU, Gyp Board + + + + + Brick, R-5 Insulation Board, Brick + + + + + Brick, R-5 Insulation Board, 8" LW Concrete, Gyp Board + + + + + Brick, R-5 Insulation Board, 12" HW Concrete, Gyp Board + + + + + Brick, 8" HW Concrete, R-11 Batt Insulation, Gyp Board + + + + + 8" LW CMU, R-11 Batt Insulation, Gyp Board + + + + + 8" LW CMU w Fill Insulation, R-11 Batt Insulation, Gyp Board + + + + + 1" Stucco, 8" HW CMU, R-11 Batt Insulation, Gyp Board + + + + + 8" LW CMU w Fill Insulation + + + + + 8" LW CMU w Fill Insulation, Gyp Board + + + + + 12" LW CMU w Fill Insulation, Gyp Board + + + + + 4" LW Concrete. R-5 Board Insulation, Gyp Board + + + + + 4" LW Concrete. R-11 Batt Insulation, Gyp Board + + + + + 4" LW Concrete. R-10 Board Insulation, 4" LW Concrete + + + + + EIFS Finish, R-5 Insulation Board, 8" LW Concrete, Gyp Board + + + + + 8" LW Concrete. R-11 Batt Insulation, Gyp Board + + + + + EIFS Finish, R-10 Insulation Board, 8" HW Concrete, Gyp Board + + + + + 8" HW Concrete. R-11 Batt Insulation, Gyp Board + + + + + 12" HW Concrete. R-19 Batt Insulation, Gyp Board + + + + + 12" HW Concrete + + + + + + + Used with the wallGroupManualJ attribute. + + + + + Fastest heat transfer + + + + + + + + + + + + + + + + + + + + + + + + + Medium heat transfer + + + + + + + + + + + + + + + + + + + + + + + + + Slowest heat transfer + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Analysis engine used + + + + + + + + + + + + This attribute specifies the default temperature unit for the entire gbXML document, wherever the temperatureUnit simple type is used. + + + + + This attribute specifies the default length unit for the entire gbXML document, wherever the lengthUnit simple type is used. + + + + + This attribute specifies the default area unit for the entire gbXML document, wherever the areaUnit simple type is used. + + + + + This attribute specifies the default volume unit for the entire gbXML document, wherever the volumeUnit simple type is used. + + + + + Results will be given in SI or IP units. True = SI units which is the implied default. If False, results will be in english units. + + + + + + + + + Absorptance of the outside surface + + + + + + + + + + + + + + + ID for a space that is bounded by this surface. First AdjacentSpaceId entered will determine how the referenced construction layers are ordered with the first construction layer being in contact with the outside or 2nd space listed and the last layer in contact with the first space listed. 
The outward normal of the surface, as defined by the right hand rule of the coordinates in the planar geometry element, is always pointing away from the first AdjacentSpaceID listed. + + + + + With interior horizontal surfaces, this attribute can distinguish between ceiling and floor surfaces to avoid double-counting of floor areas, etc. May be required in future schema versions. + + + + + + + + + + + + + + + + + + + + + + + The AirLoop element represents the equipment serving one path of air + + + + + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + A piece of equipment serving an air-loop. This is generalized to be able to contain any type of air loop equipment. The air loop equipment are ordered as they exist on the airloop. A blow-through system should have fan listed before the coils. A draw-through system should have the fan listed after the coils. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + + + ID pointing at an air loop equipment object + + + + + + + + + ID pointing to an air loop + + + + + + + Minimum air temperature before shades are closed. + + + + + + + + + + + + + Cooling and Heating Reset Temperature Type + + + + + + + + + + + + + Reflectance of solar radiation + + + + + + + + + + + + Alternative energy source + + + + + + + + + + + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + Specifies the area of the space or building. This value should be equal to the area enclosed by the physical boundaries of the space, as defined by the intersection of the horizontal plane at the highest floor height and the volumetric model's vertical interior surface planes. Building area should be the sum of space areas whose PeopleNumber element values are greater than zero. + + + + + + + + + + + + + Polygon azimuth. The direction of the outward normal for the polygon (surface or opening) defined in RectangularGeometry element. Azimuth is measured in degrees clockwise from North, where North = 0 degrees. Examples: East = 90 deg, South = 180 deg, West = 270 deg. If CADModelAzimuth is defined, the Azimuth value is relative to the CADModelAzimuth value rather then North. + + + + + + + + + + + Type of ballast used + + + + + Month and day year schedule begins + + + + + Date of holiday modeled + + + + + Biomass of vegetation + + + + + + + + + + + + + + + + + + + + + + + + + + Leakage value obtained from blower door test. + + + + + + + + + + + + + + + + + + + + + + Use this element to define the shell of the building. + + + + + This element establishes the logical relation of a given part of the building envelope such that its PlanarGeometry is part of an exterior surface of the building. + + + + + + + + + + + + + Global Unique ID from Industry Foundation Class (IFC) file. + + + + + + + Captures Building Storey Structure + + + + + + + + + + + Global Unique ID from Industry Foundation Class (IFC) file. + + + + + + + + + ID used by a CAD/BIM program to reference its internal materials library + + + + + + + The number of degrees (0 to 360) of the angle of positive Y axis from north. (Value of the of Azimuth of Model's North relative to Cardinal North) + + + + + The CADModelId Element is used to map a CAD model or BIM to its corresponding gbXML file. Allows the CAD/BIM tool referenced by the programIdRef to correlate the gbXML file to its CAD/BIM model. 
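The Azimuth annotation above states that azimuth is measured clockwise from North unless CADModelAzimuth is defined, in which case the stored value is relative to the model's own north. The helper below is a sketch of one common reading of that rule (sum and wrap into [0, 360)); the method name is hypothetical and nothing here is mandated by the schema.

static class AzimuthExample
{
    // Resolve a stored Azimuth to a true-north azimuth, assuming the stored value
    // is relative to CADModelAzimuth when that attribute is present.
    public static double TrueNorthAzimuth(double storedAzimuth, double? cadModelAzimuth)
    {
        double a = storedAzimuth + (cadModelAzimuth ?? 0.0);
        a %= 360.0;
        return a < 0 ? a + 360.0 : a;
    }
    // Example: storedAzimuth = 90 (east of the model's Y axis), cadModelAzimuth = 30 -> 120 deg true.
}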
+ + + + + + + ID reference to unique CAD/BIM program defined in DocumentHistory/ProgramInfo element. + + + + + + + + + The CADObjectId Element is used to map unique CAD object identifiers to gbXML elements. Allows CAD/BIM tools to read results from a gbXML file and map them to their CAD objects. + + + + + + + ID reference to unique CAD/BIM program defined in DocumentHistory/ProgramInfo element. + + + + + + + + + The Campus element should be used as the base for all physical objects. On a campus, place one or more buildings. + + + + + + + + + + + + + + + + + + + + + + + + ID for the weather data used for a heating design day + + + + + ID for the weather data used for a cooling design day + + + + + Global Unique ID from Industry Foundation Class (IFC) file. + + + + + + + + + + + + + + + + + This is the x, y, and z distances from the origin. This element must have three Coordinate elements when representing 3-d space, which represent x, y and z in order. This element must have two Coordinate elements when representing 2-d space. + + IfcCartesianPoint + + + + + + + + + + + Cooling degree days. CDD is calculated as the number of degrees a mean daily temperature is above a value (specified as the Temperature element), for each day. For example, if the mean temperature in a region rises to 77 degrees for three days during a year, the rest of the time staying below 75, and the Temperature element is set at 75, then CDD = 6. + + + + + + + + + + + Cooling degree day value + + + + + + This is an element from ifcXML that describes a collection of faces that make up a closed shell. + + IfcClosedShell + + + + + + + + + + + Parameters required by CLTD load calculation method, as described in the 1989 ASHRAE Handbook, Fundamentals volume, pages 26.32 and following. Ignore this element and all elements under it if you are not planning to use that method. + + + + + Specifies the number 1 to 13 to use for a roof. See Table 29 page 26.34, 1989 ASHRAE Handbook, Fundamentals volume. + + + + + Specifies the wall group code A to G to use for a wall. See Table 31, page 26.36, 1989 ASHRAE Handbook, Fundamentals volume. + + + + + Specifies the effective roof color, which depends on the actual color and whether the location is industrial or rural. See page 26.34, 1989 ASHRAE Handbook, Fundamentals volume. + + + + + Specifies whether or not a roof has a suspended ceiling under it. See table 29, page 26.34, 1989 ASHRAE Handbook, Fundamentals volume. + + + + + Specifies the effective wall color, which depends on the actual color and whether the location is industrial or rural. See page 26.36, 1989 ASHRAE Handbook, Fundamentals volume. + + + + + + + The coefficient of utilization is the ratio of luminous flux on a work plane to the luminous flux emitted by the lamps alone. + + + + + + + + + + Room cavity ratio is a measure of the room cavity proportions. + + + + + + + + + + + + + + + + + + + Conductivity as a function of temperature + + + + + + + + + + + + A Construction is a combination of layers, such as a wall or a roof + + + + + + + + + + + + + + + + + + Reference to layers that comprise this construction. Multiple LayerId's order is important and specifies the layering of the layer's referenced materials. The first Surface/AdjacentSpaceId entered will determine how the referenced construction layers are ordered with the first construction layer being in contact with the outside or 2nd Surface/AdjacentSpaceId listed and the last layer in contact with the first Surface/AdjacentSpaceId listed. 
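The CDD annotation above (and the matching HDD annotation later in this schema) describes degree days as the accumulated excess of each day's mean temperature over a base value. A minimal sketch of that accumulation is below; the class and method names are illustrative only, and the Main call reproduces the annotation's own worked example (three days at 77 against a base of 75 gives CDD = 6).

using System;
using System.Linq;

static class DegreeDayExample
{
    // Cooling degree days: sum of (mean - base) on days where the mean exceeds the base.
    public static double CoolingDegreeDays(double baseTemp, double[] dailyMeans) =>
        dailyMeans.Sum(t => Math.Max(0.0, t - baseTemp));

    // Heating degree days: sum of (base - mean) on days where the mean falls below the base.
    public static double HeatingDegreeDays(double baseTemp, double[] dailyMeans) =>
        dailyMeans.Sum(t => Math.Max(0.0, baseTemp - t));

    static void Main()
    {
        Console.WriteLine(CoolingDegreeDays(75, new[] { 77.0, 77.0, 77.0 })); // 6
    }
}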
+ + + + + This element is for purposes of modeling BIPV (building integrated photovoltaics). + + + + + + + + Use this attribute to reference objects in the DOE2 library + + + + + Please specify the program that added this element. + + + + + + + The Control element should be used for describing how this equipment is controlled. + + + + + + + + + + + + This is the an x, y, or z length measurement from the origin. + + IfcLengthMeasure + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Denotes the time this file has been created, as well as the person and program used to create this file. + + + + + + + + + + + + + + Number of operation cycles per week. + + + + + + + + + + + + + + + + + ID for operation schedules + + + + + + + + + + Allows for including daylight savings time + + + + + Set of values that define the profile of one 24 hour period divided equally to the number of values entered. + + + + + + + + + + + + + + Day of the month that the cooling design day occurs on + + + + + Day of the month that the heating design day occurs on + + + + + Flag for daylight savings on the cooling design day + + + + + Flag for daylight savings for the heating design day + + + + + Cooling design day dry bulb temperature + + + + + + + + + + + + Heating design day dry bulb temperature + + + + + + + + + + + + Cooling design day dry bulb temperature range + + + + + + + + + + + + Heating design day dry bulb temperature range + + + + + + + + + + + + Ground temperature on the cooling design day + + + + + + + + + + + + Ground temperature for the heating design day + + + + + + + + + + + + Cooling design day hour of high temperature + + + + + Heating design day hour of high temperature + + + + + Cooling design day hour of low temperature + + + + + Heating design day hour of low temperature + + + + + Month the cooling design day lands on. 1=Jan 12=Dec + + + + + Month that the heating design day occurs on. 1=Jan 12=Dec + + + + + Cooling design day atmospheric pressure + + + + + + + + + + + + Heating design day atmospheric pressure + + + + + + + + + + + + Flag for rain on the cooling design day. 0=no rain 1=rain + + + + + Heating design day rain flag. 0=no rain, 1=rain + + + + + Cooling design day sky clearness + + + + + + + + + + + + Heating design day sky clearness + + + + + + + + + + + + Flag for snow on the cooling design day. 0=not snowing 1=snowing + + + + + Heating design day snow flag. 0=not snowing 1=snowing + + + + + Cooling design day wet bulb temperature + + + + + + + + + + + + Heating design day wet bulb temperature + + + + + + + + + + + + Cooing design day wind direction + + + + + + + + + + + + Heating design day wind direction + + + + + + + + + + + + Cooling design day wind speed + + + + + + + + + + + + Heating design day wind speed + + + + + + + + + + + + Difference in pressure + + + + + + + + + + Use this attribute for conditioning units with both heating and cooling. + + + + + Use this attribute to point to a performance curve if one is provided + + + + + + + + + Difference in temperature + + + + + + + + Use this attribute for conditioning units with both heating and cooling. + + + + + + + + + + + + + + + + + + Density as a function of temperature + + + + + + + + + + + + Value of a dependent variable for this data point + + + + + + + + + + + + + Use minValue and maxValue to define constraints on the curve. + + + + + + + + + Design temperature for cooling + + + + + + + + Please specify the program that added this element. 
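The DaySchedule annotation above says its values divide a 24-hour day into equal blocks, and the ScheduleValue annotation later in this schema gives the example of 12 values covering two hours each. The sketch below shows how a consumer might sample such a profile for a given time of day; it is illustrative only and the names are not schema elements.

using System;

static class DayScheduleExample
{
    // Return the schedule value in effect at hourOfDay, given N equally spaced values.
    public static double ValueAt(double hourOfDay, double[] scheduleValues)
    {
        if (hourOfDay < 0 || hourOfDay >= 24)
            throw new ArgumentOutOfRangeException(nameof(hourOfDay));
        double blockLength = 24.0 / scheduleValues.Length;   // e.g. 12 values -> 2-hour blocks
        int index = (int)(hourOfDay / blockLength);
        return scheduleValues[index];
    }
}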
+ + + + + + + + + + + + + + + + + + Design temperature for heating + + + + + + + + Please specify the program that added this element. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Contains details about people and programs that created and modified this file + + + + + + + + + + + + + + + + + Use the standardsType attribute when the efficiency is measured at standard conditions. + + + + + + Use this attribute for conditioning units with both heating and cooling. + + + + + Use this attribute to point to a performance curve if one is provided + + + + + + + + + Electric load + + + + + + + + + + + + Height above sea level + + + + + + + + + + + + + + + + + + + + + + Infra-red emissivity + + + + + + + + + + + + + Month and day year schedule ends + + + + + + + + + + + + + ID for a resource meter + + + + + + + + + Minimum enthalpy setpoint for air-side economizer. + + + + + + + + + + + + The Equation element allows data for n-dimensional algebraic data to be entered. The Expression element should contain an dependent variable followed by an = sign and an equation containing independent variables. The independent and dependent variables are then defined by their respective tags. The only mathematical operations allowed are ^ (power), + (addition), - (subtraction), / (division), and * (multiplication). Example: z=x+2*y^2 would have an DependentVariable with a name = z, and two IndependentVariable(s) with names of x and y. Descriptions would be included for each variable, along with unit and dataType information identifying the variable and its role. + + + + + + + + + + + + Amount of power used by equipment in a given area + + + + + + + + Please specify the program that added this element. + + + + + + + + + + External equipment. This is generalized to be able to contain any type of external equipment. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ID for the schedule of transmittance of a shading surface + + + + + + ID pointing to a hydronic loop + + + + + + + + ID pointing to an air loop + + + + + Please specify the program that added this element. + + + + + + + + + + + + + + + + + + Properties of a material under fire conditions. + + + + + + + + + + + + + Properties of a construction under fire conditions. + + + + + + + + + + + + + + + + + + Specifies the heat loss from a floor in terms of the amount of heat lost per length of exposed perimeter. Used with both commercial and residential load calculation methods. + + + + + + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + + + Use this element to describe how the fluid flow is controlled. The temperatures specified in this element should be measured just upstream of the chiller. + + + + + + + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + + + + + + + + + + + + Use this attribute to reference objects in the DOE2 library + + + + + + + + + + + + + + + + Space between window panes + + + + + + + + + + + + + + + + + + + + Use this attribute to reference objects in the DOE2 library + + + + + + + + + ID for a gap between window panes + + + + + + + This element has been left open for use with other geometry definitions, such as X3D or BLISXML. 
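The Equation element annotation above gives the worked example z = x + 2*y^2, with z declared as the DependentVariable and x and y as IndependentVariables. The fragment below is only a sketch of how a consumer might bind those names when evaluating that particular example; it hard-codes the expression rather than parsing it, and none of the identifiers come from the schema.

using System;
using System.Collections.Generic;

static class EquationExample
{
    // Evaluate the annotation's example expression z = x + 2*y^2 for bound independents.
    public static double EvaluateExample(IDictionary<string, double> independents)
    {
        double x = independents["x"];
        double y = independents["y"];
        return x + 2 * Math.Pow(y, 2);
    }
}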
+ + + + + + + + + + Minimum amount of glare to trigger the shades to close + + + + + + + + + + + + Properties of one layer of a window + + + + + + + + + + + + + + + + Use this attribute to reference objects in the DOE2 library + + + + + + + + + + + + + + + + + + ID indicating the type of glaze used + + + + + + + Monthly ground temperatures, 12 values + + + + + + + + + + + + + + + + + + + + + + Heating degree days. HDD is calculated as the number of degrees a mean daily temperature is below a value (specified as the Temperature element), for each day. For example, if the mean temperature in a region drops to 64 degrees for four days during a year, the rest of the time staying above 65, and the Temperature element is set at 65, then HDD = 4. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Amount of horizontal solar radiation striking the opening before a shade is closed. + + + + + + + + + + + + h (Outside) is the combined coefficient of heat transfer by long-wave radiation and convection at outer surface, Btu/h-sf-F. See 2001 HoF Chapter 25 Table 1. + + + + + + + Used by the hOutsideUnitType attribute. + + + + + + + + + The HydronicLoop element represents the equipment serving one path of water, or other liquid + + + + + + + + + + + + + + + + + + Only use this attribute for secondary loops to reference the primary loop. + + + + + Please specify the program that added this element. + + + + + + + A piece of equipment serving a hydronic loop (most commonly a water loop). This is generalized to be able to contain any type of hydronic loop equipment. + + + + + + + + + + + + + Use this element to point to another loop. For instance if this is a chiller and is a child of a chilled water loop, use this element to point at a cooling water loop. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + + + ID pointing at a hydronic loop equipment object + + + + + + + + + ID pointing to a hydronic loop + + + + + Type of hydronic loop + + + + + + + + + + + + + + + + This is an X3D element to map a texture to this material. From X3D specification: The ImageTexture node defines a texture map by specifying an image file and general parameters for mapping to geometry. Texture maps are defined in a 2D coordinate system (s, t) that ranges from [0.0, 1.0] in both directions. The bottom edge of the image corresponds to the S-axis of the texture map, and left edge of the image corresponds to the T-axis of the texture map. The lower-left pixel of the image corresponds to s=0, t=0, and the top-right pixel of the image corresponds to s=1, t=1. + + + + + + Specifies location of the image. From X3D specification: The texture is read from the URL specified by the url field. When the url field contains no values, texturing is disabled. Browsers shall support the JPEG and PNG image file formats. In addition, browsers may support other image formats (e.g. CGM) which can be rendered into a 2D image. Support for the GIF format is also recommended (including transparency). + + + + + From X3D specification: If repeatS is TRUE, the texture map is repeated outside the [0.0, 1.0] texture coordinate range in the S direction so that it fills the shape. If repeatS is FALSE, the texture coordinates are clamped in the S direction to lie within the [0.0, 1.0] range. 
+ + + + + From X3D specification: If repeatT is TRUE, the texture map is repeated outside the [0.0, 1.0] texture coordinate range in the T direction so that it fills the shape. If repeatT is FALSE, the texture coordinates are clamped in the T direction to lie within the [0.0, 1.0] range. + + + + + + + Value of the independent variable for this data point + + + + + + + + + + + + + + + + + + + + + + + ID for the schedule of transmittance of a shading surface + + + + + + + Flow of air through building envelope + + + + + + + + + + Please specify the program that added this element. + + + + + + + + + + + + + + + + Thermal resistance of the internal air film in a layer + + + + + + + + + + + + Interior equipment. This is generalized to be able to contain any type of internal equipment. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ID for the schedule of transmittance of a shading surface + + + + + + ID pointing to a hydronic loop + + + + + + + + ID pointing to an air loop + + + + + Please specify the program that added this element. + + + + + + + + + ID pointing at an interior equipment object + + + + + + + Type of lamp used + + + + + + + + + + + + + + + Degrees north of the equator, in decimal degrees. Locations south of the equator are negative. + + + + + A Layer is a combination of one of more materials + + + + + + + + + + + + + Use this attribute to reference objects in the DOE2 library + + + + + + + + + ID identifying a layer in this construction. Multiple layers in Constructions are in order from outside to inside. + + + + + + + + + + + + + + + + Building storey Local Placement Z coordinate. + + + + + + + + + + + + + + + + + + + + + + + + + ID for the schedule of transmittance of a shading surface + + + + + + + + + + + Identifier pointing at a light object + + + + + + + + + + + + + + + + + + ID for a resource meter + + + + + Please specify the program that added this element. + + + + + + + + + + + + Illuminance level that the lights are maintained if daylighting controls present (lightControlTypeEnum not equal to on off). + + + + + Location where Illuminance value is specified. + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + + + + + + + + + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + Amount of power used by lighting in a given area + + + + + + + + Please specify the program that added this element. + + + + + + + + + + + + + + + + + + + The Location element describes the location of the global origin for this campus. This element can be used in a general sense by using the Name and Description elements for a physical address or landmark, or Location can be made precise by specifying the Elevation, Longitude, and Latitude of the origin. + + + + + + + + + + + + + + + + + Degrees east of Greenwich, in decimal degrees. Locations west of Greenwich are negative. + + + + + + + + + + + + + + + Type of luminaire used + + + + + Parameters required by the ACCA Manual J Eighth Edition load calculation method. Ignore this element and all elements under it if you are not planning to use that method. + + + + + Specifies the group code for the wall, which determines how quickly the wall delivers heat into the space. See Table 4C, page T4C-1 ACCA Manual J Eighth Edition. + + + + + Specifies the array of CLTD values to use for the roof, which determines how quickly the roof delivers heat into the space. See Table 4A page T4A-18 and following, ACCA Manual J Eighth Edition. 
+ + + + + Specifies whether or not to use a higher heating temperature difference value in calculating the heating loss for a floor (25 degrees F higher value is used in formula if floor is radiant). See Figure A5-5, page A5-8, ACCA Manual J Eighth Edition. + + + + + Specifies the u-value of the crawl space wall that is associated with the floor, which affects both the heating and cooling load. See Figure A5-5, page A5-8, and Figure A5-17, page A5-16, ACCA Manual J Eighth Edition. + + + + + Specifies whether or not the crawl space wall associated with this floor is well sealed from the outdoor air, which affects both the heating and cooling loads. See Figure A5-5, page A5-8, and Figure A5-17 page A5-16, ACCA Manual J Eighth Edition. + + + + + Specifies the type of floor being considered, which affects both the heating and the cooling loads. See Table 4A, pages T4A-27 and following, ACCA Manual J Eighth Edition. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Use this attribute to reference objects in the DOE2 library + + + + + + + + + ID identifying a material in this layer. Multiple materials in layers are in order from outside to inside. + + + + + Percentage (1-100%) of this layer that this material is made from. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Description of a resource measurement + + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + + + ID for a resource meter + + + + + + + + Minimum flow + + + + + + + + + + + + + + + + + + + + + + + + + + + Fraction of light illuminance used at minimum setting. + + + + + + + + + + + + + + Fraction of power used at minimum setting. + + + + + + + + + + + + + + + + + + + + Denotes the time this file has been modified, as well as the person and program used to modify this file. + + + + + + + + + + Indicates if the motor is located in the path of the air stream. + + + + + + Natural ventilation does not occur above this temperature. + + + + + + + + + + + + Natural ventilation does not occur below this temperature. + + + + + + + + + + + + Occupancy dependence of natural ventilation. If true, then natural ventilation only occurs when people are present. + + + + + + Outside air flow per area + + + + + + + + Please specify the program that added this element. + + + + + + + + + Outside air flow per person + + + + + + + + Please specify the program that added this element. + + + + + + + + + Indicates which object or objects the results apply to, if any + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Identifier pointing to a construction + + + + + + Global Unique ID from Industry Foundation Class (IFC) file. + + + + + + + + + + + + + + + + + + + + Schedule of operation for a piece of equipment. For heating and cooling equipment (such as a boiler or chiller) this is the availability schedule, the schedule of temperatures that, when reached, the piece of equipment will be available. + + + + + + + Use this element to report overall domestic hot water use for the domestic hot water loop referenced with the hydronicLoopIdRef attribute. The schedule referenced by waterUseScheduleIdRef defines a hot water use fraction schedule, which when combined with the value of PeakDomesticHotWaterFlow fully describe the hot water use of this building. + + + + + + + + + + Please specify the program that added this element. + + + + + + + + + Occupancy of the space + + + + + + + + Please specify the program that added this element. 
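The outside-air annotations above define flow per unit floor area and flow per person. The schema itself does not prescribe how a tool combines them; the sketch below shows one common convention (summing the area-based and occupancy-based components, in whatever flow units the document declares) and should be read as an assumption, not a rule from the specification.

static class OutsideAirExample
{
    // One possible combination of OutsideAirFlowPerArea and OutsideAirFlowPerPerson
    // for a space: area-driven plus occupancy-driven ventilation.
    public static double DesignOutsideAir(double floorArea, double peopleNumber,
                                          double flowPerArea, double flowPerPerson) =>
        floorArea * flowPerArea + peopleNumber * flowPerPerson;
}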
+ + + + + + + + + Amount of heat added to the space by people + + + + + + + + + Please specify the program that added this element. + + + + + + + + + Percent of space area whose lights are controlled by a daylight sensor. Only applicable when LightPowerPerArea is used at the space level and not Lighting Systems. + + + + + + + + + + + + Percent of original building shell used in renovation + + + + + + + + + + + + Used to represent part-load performance and other performance metrics + + + + + + + + + + + + + The rate water vapor is allowed through a surface + + + + + + + + + + + + Contains information about people that created and modified this file + + + + + + + + + + + + + + + + + + + + + + + This element has been left open for use with other photometry definitions. Photometric data is required for various forms of lighting analysis. This tag provides a way for the photometric data to be passed. Since this can be done in a variety of ways (iesna LM-63, cibse TM14, ELUMDAT, etc.) a specific format is not being specified. + + + + + This element specifies the position and the x, y and z axis of the light source. This element must have four Coordinate elements which represent the position and the x, y and z axis in order. The first CartesianPoint element specifies the position of the light source. The second, third and fourth CartesianPoint elements specifies the displacement points for the positive x, positive y and positive z axis. + + + + + + + + + + List of points defining a loop. There are no repeated points in the list. All data are global, with the assumption that positive Z is up, and if CADModelAzimuth is undefined or zero, positive X is East and positive Y is North. If CADModelAzimuth is defined it is the angle of positive Y to North, positive X is the vectorial product of Y and Z. If geometry is to be precise, use Longitude, Latitude and Elevation in the Location element to define the origin. Otherwise the origin is an arbitrary point. Use PlanarGeometry to define a three dimensional polygon that lies on a plane, and has no self-intersection. + + + + + + + + + + + + + The PointData element allows for graph or tabular information to be entered. The data type and units of the independent and dependent variables are defined in their respective elements. Each Data element represent a data point, with each Value element representing the value of a variable - starting with the independent variable, then each dependent variable listed in the order defined. + + + + + + + + + + + + This is a list of coordinates that make up a polygon in three-dimensional space. All coordinates must lie on the same plane. The right-hand rule applies for defining the outward normal of a surface: For every surface, points must be defined in order, such that the direction of (the average cross-product between (any point, the centroid of the surface, and the next point)) points in the direction of the outward normal, which is a vector pointing away from the first AdjacentSpaceID listed. + + IfcPolyLoop + + + + + + + + + + + the ratio of the total amount of void space in a material (due to poses, small channels, and so on) to the bulk volume occupied by the material. + + + + + + + + + + + + Maximum consumption of energy (power input) + + + + + + + + + + ID for a resource meter + + + + + + + + + + Prandtl number as a function of temperature + + + + + + + + + + + + Use this element to describe how the air pressure is controlled. 
The temperatures specified in this element should be measured just upstream of the air handlers. + + + + + + + + + + + + + + + + + + + Contains information about the originating or modifying file or service that created or modified this file + + + + + + + + + + + + Contains information about programs that created and modified this file + + + + + + + + + + + + + + + + + + + + + + + + + + Geometry data in a form typically used in simulation engines. For surfaces, specify the location of the bottom-left corner with the CartesianPoint element when facing it from the outside. Also for surfaces, Tilt and Azimuth must be specified. If CADModelAzimuth is defined, the Azimuth value is relative to the CADModelAzimuth value rather then North. For openings, the third Coordinate should be zero or left missing. For openings, these Coordinates represent the distance of the bottom-left of the opening to the bottom-left corner of the parent surface. Azimuth and Tilt should not be specified for opening. There is an optional PolyLoop element, which may be used for describing the polygon shape of the surface. + + + + + + + + + + + This is a two-dimensional polygon, with the origin at the point specified with RectangularGeometry/CartesianPoint. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Please specify the program that generated the results using the id of the ProgramInfo element. + + + + + Please specify how the values were obtained. + + + + + Use the startTime attribute to define the start of the first Value element. + + + + + This specifies the duration of time between each Value element. This is required if more than one Value element exists in a this Results element. + + + + + + + Roughness of the outside surface + + + + + + + + Parameters required by RTS load calculation method, as described in pages 29.25 and following of the 2001 ASHRAE Handbook, Fundamentals volume. Ignore this element and all elements under it if you are not planning to use that method. + + + + + + + + Specifies which Conduction Time Series to use for the roof. See Table 21, page 29.30, 2001 ASHRAE Handbook, Fundamentals volume. Only used if the construction is a roof and you are using the RTS calculation method. + + + + + Specifies which Conduction Time Series to use for the wall. See Table 20, page 29.28, 2001 ASHRAE Handbook, Fundamentals volume. Only used if the construction is a wall and you are using the RTS calculation method. + + + + + + + Resistance of material + + + + + + + + + + + + + The ShadeControl element is a bit complex, as shading control is handled differently in different simulation engines. Each child of ShadeControl represents a criteria to help determine if the shades will be open or closed. The resulting shade action should be a union of these. Example: You have included a ShadeSchedule, which is a 1 on weekdays and a 0 on weekends, and a SolarOnOpening. The shade will be drawn when the solar radiation on this opening is greater than SolarOnOpening and it is a weekday. + + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + Use this element to define schedules for a shade. + + + + + + + + List of year schedules that make up an entire calendar year. + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + Value for one block of time. Divides a day evenly into number of ScheduleValue elements defined in DaySchedule. 
Example: If 12 ScheduleValue elements are defined, each will represent two hours + + + + + Distance from outside surface of window to outside surface of wall + + + + + + + + + + + + Shading Coefficient: The ratio of total solar transmittance for the specified glazing system to the total solar transmittance for the standard reference glazing (1/8" clear). + + + + + + + + + + + + All data are global, with the assumption that positive Z is up, and if CADModelAzimuth is undefined or zero, positive X is East and positive Y is North. If CADModelAzimuth is defined it is the angle of positive Y to North, positive X is the vectorial product of Y and Z. If geometry is to be precise, use Longitude Latitude, and Elevation in the Location element to define the origin. Otherwise the origin is an arbitrary point. ShellGeometry is used to define a union of closed shells, where there is no intersection of any two of the given shells. + + + + + + + + + + + + + + + + + + + + + + + When element is a child of WindowType: The center of glass solar heat gain coefficient. When element is a child of Opening: The overall solar heat gain coefficient for the window assembly. + + + + + + + + + + + + + Amount of solar radiation striking the opening before a shade is closed. + + + + + + + + + + + + A space represents a volume enclosed by surfaces. + + + + + + + + + + + + + + + + + + + Planar polygon that represents the perimeter of space and whose area is equal to the floor area of the space. + + + + + Planar polygons that represent the interior surfaces bounding the space and whose volume is equal to the volume of the space. + + + + + + + + + + + + + + + + spaceType represents how a space is used. + + IfcPolyLoop, an IESNA and ASHRAE project for determining lighting power density for individual spaces. + + + + + + ID for the schedule of transmittance of a shading surface + + + + + ID of the schedule for lights contained in this space + + + + + ID for schedule of equipment use + + + + + ID for schedule of people in this space + + + + + + ID for BuildingStorey this space is on. + + + + + Global Unique ID from Industry Foundation Class (IFC) file. + + + + + + + This element establishes the logical relation of a given part of the space ShellGeometry such that its PlanarGeometry is part of an interior surface bounding the space. + + + + + + + + Global Unique ID from Industry Foundation Class (IFC) + file. + + + + + If this attribute is set to true, the boundary is important in heat flow calculation. + + + + + Connects the space boundary to a surface representing a building element (or representing the open air). + + + + + If surface referenced by SpaceBoundary is adjacent to two spaces, then this references the corresponding SpaceBoundary of the opposite space. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + First AdjacentSpaceId entered will determine how the referenced construction layers are ordered with the first construction layer being in contact with the outside or 2nd space listed and the last layer in contact with the first space listed. + + + + + + + + + + + + Identifier pointing to a construction + + + + + ID for the schedule of transmittance of a shading surface + + + + + This attribute specifies whether or not direct beam radiation from the sun will ever hit this surface. Only relevant to exterior surface types. + + + + + Global Unique ID from Industry Foundation Class (IFC) file. + + + + + + + Temperature + + + + + + + + + + + + + + + + + + Please specify the program that added this element. 
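The PolyLoop and PlanarGeometry annotations earlier in this section describe the outward-normal convention: average the cross products formed by each point, the loop centroid, and the next point, and the result points along the outward normal (right-hand rule). The routine below is a sketch of that computation under the assumption of a non-degenerate, planar loop; the tuple-based signature is mine, not the schema's.

using System;

static class PolyLoopNormalExample
{
    // Compute a unit outward normal for an ordered loop of 3-D points.
    public static (double X, double Y, double Z) OutwardNormal((double X, double Y, double Z)[] pts)
    {
        // Centroid of the loop's points.
        double cx = 0, cy = 0, cz = 0;
        foreach (var p in pts) { cx += p.X; cy += p.Y; cz += p.Z; }
        cx /= pts.Length; cy /= pts.Length; cz /= pts.Length;

        // Sum the cross products (point - centroid) x (nextPoint - centroid) around the loop.
        double nx = 0, ny = 0, nz = 0;
        for (int i = 0; i < pts.Length; i++)
        {
            var a = pts[i];
            var b = pts[(i + 1) % pts.Length];
            double ax = a.X - cx, ay = a.Y - cy, az = a.Z - cz;
            double bx = b.X - cx, by = b.Y - cy, bz = b.Z - cz;
            nx += ay * bz - az * by;
            ny += az * bx - ax * bz;
            nz += ax * by - ay * bx;
        }

        // Normalize; for points listed per the right-hand rule this points away from
        // the first AdjacentSpaceId, as the annotations describe.
        double len = Math.Sqrt(nx * nx + ny * ny + nz * nz);
        return (nx / len, ny / len, nz / len);
    }
}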
+ + + + + + + + + Use this element to describe how the temperature is controlled. The temperatures specified in this element should be measured just upstream of the most critical piece of equipment (air handlers for a chilled water and hot water loops the chiller for the cooling water loop). + + + + + + + + + + + + + + + + + + + + + The number of degrees from up that the outside of the surface is tilted in relation to the bottom-left corner. + + + + + + + + + + + Time or period that results represent. Example: if timeType = Month and TimeIncrement = 1 the Value would be for the month of January or if timeType = Hour and TimeIncrement = 4 the 81st the Value would be for January 14 at noon. + + + + + + + + + + + + Transmittance of shading surface + + + + + + + + + + + + + Public transportation + + + + + + + + + + + + ID for the schedule of transmittance of a shading surface + + + + + Please specify the program that added this element. + + + + + + + User/project defined Code for Space or Zone. + + + + + + We have left the UtilityRate element open to allow for use with billing schemas. + + + + + + Overall conductance + + + + + + + + + + + + Contains the actual numerical result value. If series result type each Value element contains the one value and is in sequential order for the series. + + + + + Plants, trees, etc. on a campus + + + + + + + + + + + + + + + + Refers to a building-level surface. + + + + + Please specify the program that added this element. + + + + + + + + + + + + + + + + + Viscosity as a function of temperature + + + + + + + + + + + + Specifies the volume of the space. This value should be equal to the volume enclosed by the actual physical boundaries of the space, as defined by the volumetric model's interior surfaces bounding that space. + + + + + + + + + + + + Waste water heat recovery efficiency + + + + + + + + ID pointing to an air loop + + + + + ID pointing to a hydronic loop + + + + + + + + + This includes blowdown (draining a cooling tower to clean), drift (water loss from water sprayed), and evaporation in cooling towers. + + + + + + + + + + + + + + + + + + + + + Amount of water used for water using equipment per cycle of operation. + + + + + + + + + + + + The amount of heat from this equipment rejected to this space. For condensors exterior to building, this value will be 0. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Please specify the program that added this element. + + + + + + + + Set of day schedules all assigned to a unique particular period of the week using the day type attribute. Do not schedule conflicting day types to the same week schedule. + + + + + + + + + + + + + + + + + + + + + + + + + + + + Wildfire threat design atomospheric pressure. + + + + + + + + + + + + Wildfire threat design relative humidity. + + + + + + + + + + + + Wildfire threat design dry bulb temperature. + + + + + + + + + + + + Wildfire threat design wind direction + + + + + + + + + + + + Wildfire threat design wind speed. + + + + + + + + + + + + Wind speed for wind turbine. + + + + + + + + + + + + + + + + + + + + + + One WindowType should be created for each type of window. Create a Blind, Frame, and Cost for each OpeningType, and then add as many Glaze and Gaps as exist in the window. Place these Glaze and Gap elements in order from outside to inside. For instance, a two pane window will have Glaze Gap Glaze, where the first Glaze represents the outside surface. 
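The WindowType annotation above requires Glaze and Gap children to be listed from outside to inside, so a double-pane unit reads Glaze, Gap, Glaze with the first Glaze on the exterior. The tiny sketch below only restates that ordering for clarity; the strings are descriptive labels, not schema values.

static class WindowLayerOrderExample
{
    // Index 0 is the outermost layer, matching "the first Glaze represents the outside surface".
    public static readonly string[] DoublePane = { "Glaze (outer)", "Gap", "Glaze (inner)" };
}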
+ + + + + + + + + + + + + + + + + + + This element is for purposes of modeling BIPV (building integrated photovoltaics). + + + + + + + Use this attribute to reference objects in the DOE2 library + + + + + Please specify the program that added this element. + + + + + + + + + + + + + + + + + + + + + + + + + + + Year of simulation to use + + + + + + + + + + + + Set of week schedules all assigned for a particular time period during the year defined by the begin and end date elements. These must not span more than one calendar year. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ID of heating schedule + + + + + Design temperature for cooling + + + + + Outside air schedule ID + + + + + Air changes schedule ID + + + + + ID of the fan schedule for this zone + + + + + Global Unique ID from Industry Foundation Class (IFC) file. + + + + + Please specify the program that added this element. + + + + + + + Minimum cooling load that triggers the shades to close + + + + + + + + + + diff --git a/XMLValidatorWeb/Images/TmpImage.gif b/XMLValidatorWeb/Images/TmpImage.gif new file mode 100644 index 0000000..0ca7ada Binary files /dev/null and b/XMLValidatorWeb/Images/TmpImage.gif differ diff --git a/XMLValidatorWeb/Images/brandmark-gbxml-small.png b/XMLValidatorWeb/Images/brandmark-gbxml-small.png new file mode 100644 index 0000000..25dc2b6 Binary files /dev/null and b/XMLValidatorWeb/Images/brandmark-gbxml-small.png differ diff --git a/XMLValidatorWeb/Images/header01.jpg b/XMLValidatorWeb/Images/header01.jpg new file mode 100644 index 0000000..2e379ca Binary files /dev/null and b/XMLValidatorWeb/Images/header01.jpg differ diff --git a/XMLValidatorWeb/Images/header_new.png b/XMLValidatorWeb/Images/header_new.png new file mode 100644 index 0000000..0ce5f1c Binary files /dev/null and b/XMLValidatorWeb/Images/header_new.png differ diff --git a/XMLValidatorWeb/MasterPage.Master b/XMLValidatorWeb/MasterPage.Master new file mode 100644 index 0000000..f9026ea --- /dev/null +++ b/XMLValidatorWeb/MasterPage.Master @@ -0,0 +1,46 @@ +<%@ Master Language="C#" AutoEventWireup="true" CodeBehind="MasterPage.master.cs" + Inherits="XMLValidatorWeb.MasterPage" %> + + + + + + + + + + + " rel="stylesheet" /> + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + diff --git a/XMLValidatorWeb/MasterPage.Master.cs b/XMLValidatorWeb/MasterPage.Master.cs new file mode 100644 index 0000000..795a292 --- /dev/null +++ b/XMLValidatorWeb/MasterPage.Master.cs @@ -0,0 +1,17 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; +using System.Web.UI; +using System.Web.UI.WebControls; + +namespace XMLValidatorWeb +{ + public partial class MasterPage : System.Web.UI.MasterPage + { + protected void Page_Load(object sender, EventArgs e) + { + + } + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/MasterPage.Master.designer.cs b/XMLValidatorWeb/MasterPage.Master.designer.cs new file mode 100644 index 0000000..11b45f0 --- /dev/null +++ b/XMLValidatorWeb/MasterPage.Master.designer.cs @@ -0,0 +1,51 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by a tool. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + +namespace XMLValidatorWeb { + + + public partial class MasterPage { + + /// + /// head control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.ContentPlaceHolder head; + + /// + /// form1 control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.HtmlControls.HtmlForm form1; + + /// + /// Img1 control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.HtmlControls.HtmlImage Img1; + + /// + /// ContentPlaceHolder1 control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. 
+ /// + protected global::System.Web.UI.WebControls.ContentPlaceHolder ContentPlaceHolder1; + } +} diff --git a/XMLValidatorWeb/Pages/AjaxHandler.ashx b/XMLValidatorWeb/Pages/AjaxHandler.ashx new file mode 100644 index 0000000..5517416 --- /dev/null +++ b/XMLValidatorWeb/Pages/AjaxHandler.ashx @@ -0,0 +1 @@ +<%@ WebHandler Language="C#" CodeBehind="AjaxHandler.ashx.cs" Class="XMLValidatorWeb.AjaxHandler" %> diff --git a/XMLValidatorWeb/Pages/AjaxHandler.ashx.cs b/XMLValidatorWeb/Pages/AjaxHandler.ashx.cs new file mode 100644 index 0000000..2089aaa --- /dev/null +++ b/XMLValidatorWeb/Pages/AjaxHandler.ashx.cs @@ -0,0 +1,122 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; +using XMLValidatorWeb.SupportFiles; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json; +using System.IO; +using System.Xml; +using DOEgbXML; +using log4net; + +namespace XMLValidatorWeb +{ + /// + /// Summary description for AjaxHandler + /// + public class AjaxHandler : IHttpHandler + { + private static readonly ILog logger = + LogManager.GetLogger(typeof(XMLParser)); + + public void ProcessRequest(HttpContext context) + { + var formdata = context.Request.Form; + string testcaseName = formdata["testcase"].Replace('+',' '); + string schemaversion = formdata["schema"]; + gbXMLReport gbr = new gbXMLReport(); + gbr.gbxml_testcase = testcaseName; + gbr.gbxml_version = schemaversion; + + logger.Info("Received XML file from user at UTC time " + DateTime.UtcNow); + var file = context.Request.Files[0]; + if (file.ContentType == "text/xml") + { + logger.Info("File has extension XML."); + DOEgbXMLValidator val = new DOEgbXMLValidator(schemaversion); + XMLParser parser = new XMLParser(); + //if there is a file + //valadate it by pass in input stream as xmlreader + Stream responseStream = file.InputStream; + XmlReader xmlreader = XmlReader.Create(responseStream); + + //if it is not valid + if (!val.IsValidXmlEx(xmlreader) || val.nErrors > 0 || val.nWarnings > 0) + { + //if (PrintFriendlyButton != null) + // PrintFriendlyButton.Visible = false; + + //if (DownloadLogButton != null) + // DownloadLogButton.Visible = false; + + + //setup errorlog + if (val.nErrors > 0 || val.nWarnings > 0) + { + gbr.XMLSchemaErrors = val.nErrors.ToString(); + gbr.XMLSchemaWarnings = val.nWarnings.ToString(); + logger.Info("Found " + val.nErrors + " Errors and " + val.nWarnings + " Warnings " + val.Errors); + gbr.message = val.Errors; + gbr.schema_compliance_results = "FAIL"; + gbr.detailed_compliance_results = "DID NOT ATTEMPT DUE TO SCHEMA FAILURE"; + + } + else + { + gbr.XMLSchemaWarnings = "Infinity"; + gbr.XMLSchemaErrors = "Infinity"; + + logger.Info("Your XML File is severely deficient structurally. It may be missing element tags or is not valid XML. The test has failed. 
" + val.BigError); + gbr.message = "Your XML File is severely deficient structurally."; + gbr.schema_compliance_results = "FAIL"; + gbr.detailed_compliance_results = "DID NOT ATTEMPT DUE TO SCHEMA FAILURE"; + } + } + else{ + //the xml itself is totally valid + gbr.schema_compliance_results = "PASS"; + gbr.message = "The XML uploaded is validated against schema " + gbr.gbxml_version; + gbr.XMLSchemaErrors = val.nErrors.ToString(); + gbr.XMLSchemaWarnings = val.nWarnings.ToString(); + + //run test + + responseStream.Position = 0; + XmlReader xmlreader2 = XmlReader.Create(responseStream); + //start test + parser.StartTest(xmlreader2, testcaseName, ref gbr); + + //see if any of the PassOverall are failed + var campusProps = gbr.CampusReport; + var surffailures = campusProps.SurfacesReport.FindAll(x => x.FoundMatch == false); + if (surffailures.Count > 0) gbr.detailed_compliance_results = "FAIL"; + var spacefailures = campusProps.SpacesReport.FindAll(x => x.FoundMatch == false); + if (spacefailures.Count > 0) gbr.detailed_compliance_results = "FAIL"; + var surfsummaryfail = campusProps.SurfacesSummary.FindAll(x => x.PassedAllTests == false); + if (surfsummaryfail.Count > 0) gbr.detailed_compliance_results = "FAIL"; + var spacesummaryfail = campusProps.SpacesSummary.FindAll(x => x.PassedAllTests == false); + if (spacesummaryfail.Count > 0) gbr.detailed_compliance_results = "FAIL"; + //TODO: building summary, stories summary + } + } + else{ + //the stuff is not even xml + logger.Info("Your file does not end in .xml"); + gbr.message = "The file does not end in .xml"; + gbr.schema_compliance_results = "FAIL"; + } + + context.Response.ContentType = "application/json"; + context.Response.Write(JsonConvert.SerializeObject(gbr)); + } + + public bool IsReusable + { + get + { + return false; + } + } + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/Pages/PrintFriendlyTablePage.aspx b/XMLValidatorWeb/Pages/PrintFriendlyTablePage.aspx new file mode 100644 index 0000000..5575291 --- /dev/null +++ b/XMLValidatorWeb/Pages/PrintFriendlyTablePage.aspx @@ -0,0 +1,18 @@ +<%@ Page Language="C#" AutoEventWireup="true" CodeBehind="PrintFriendlyTablePage.aspx.cs" Inherits="XMLValidatorWeb.PrintFriendlyTablePage" %> + + + + + + + + +
+
+ + + +
+
+ + diff --git a/XMLValidatorWeb/Pages/PrintFriendlyTablePage.aspx.cs b/XMLValidatorWeb/Pages/PrintFriendlyTablePage.aspx.cs new file mode 100644 index 0000000..8b4c5ea --- /dev/null +++ b/XMLValidatorWeb/Pages/PrintFriendlyTablePage.aspx.cs @@ -0,0 +1,26 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; +using System.Web.UI; +using System.Web.UI.WebControls; + + + +namespace XMLValidatorWeb +{ + public partial class PrintFriendlyTablePage : System.Web.UI.Page + { + protected void Page_Load(object sender, EventArgs e) + { + if (PrintFriendlyTablePageLabel != null) + { + if (Session["table"] == null) + Response.Redirect(@"~/"); + + PrintFriendlyTablePageLabel.Text = Session["table"].ToString(); + } + + } + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/Pages/PrintFriendlyTablePage.aspx.designer.cs b/XMLValidatorWeb/Pages/PrintFriendlyTablePage.aspx.designer.cs new file mode 100644 index 0000000..38c2c7e --- /dev/null +++ b/XMLValidatorWeb/Pages/PrintFriendlyTablePage.aspx.designer.cs @@ -0,0 +1,33 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by a tool. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + +namespace XMLValidatorWeb { + + + public partial class PrintFriendlyTablePage { + + /// + /// form1 control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.HtmlControls.HtmlForm form1; + + /// + /// PrintFriendlyTablePageLabel control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.Label PrintFriendlyTablePageLabel; + } +} diff --git a/XMLValidatorWeb/Pages/TestDetailPage.aspx b/XMLValidatorWeb/Pages/TestDetailPage.aspx new file mode 100644 index 0000000..9b138fa --- /dev/null +++ b/XMLValidatorWeb/Pages/TestDetailPage.aspx @@ -0,0 +1,18 @@ +<%@ Page Language="C#" MasterPageFile="~/MasterPage.Master" AutoEventWireup="true" CodeBehind="TestDetailPage.aspx.cs" Inherits="XMLValidatorWeb.TestDetail" %> + + +
+
+ +
+
+ +
+
+ +
+
+ +
+
+
diff --git a/XMLValidatorWeb/Pages/TestDetailPage.aspx.cs b/XMLValidatorWeb/Pages/TestDetailPage.aspx.cs new file mode 100644 index 0000000..1deddbc --- /dev/null +++ b/XMLValidatorWeb/Pages/TestDetailPage.aspx.cs @@ -0,0 +1,103 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Web; +using System.Web.UI; +using System.Web.UI.WebControls; +using DOEgbXML; + +namespace XMLValidatorWeb +{ + public partial class TestDetail : System.Web.UI.Page + { + protected void Page_Load(object sender, EventArgs e) + { + if (TestDetailLabelOverView != null) + { + if (Request.QueryString["type"] != "Error") + { + List reportlist = new List(); + if (Session["reportList"] == null || Request.QueryString["type"] == null) + Response.Redirect(@"~/"); + + reportlist = (List)Session["reportList"]; + + //looking for the right report from the list + int testType = 0; + int subType = -1; + + if (Request.QueryString["type"] != null) + { + try + { + testType = (int)Convert.ToInt32(Request.QueryString["type"]); + } + catch + { + return; + } + } + + if (Request.QueryString["subtype"] != null) + { + try + { + subType = (int)Convert.ToInt32(Request.QueryString["subtype"]); + } + catch + { + return; + } + } + + DOEgbXMLReportingObj rightReport = new DOEgbXMLReportingObj(); + foreach (DOEgbXMLReportingObj report in reportlist) + if (report.testType == (TestType)testType) + if (report.subTestIndex == -1 || report.subTestIndex == subType) + rightReport = report; + + + //title + string title = rightReport.testType.ToString(); + title = title.Replace("_", " "); + if (subType != -1) + title += " " + subType; + TestDetailLabelName.Text += "

" + title + "

"; + + //description + // TestDetailLabelOverView.Text += "

" + "Description.................................................................." + "

"; + TestDetailLabelOverView.Text += "

" + "Test Summary:" + "

" + + "

" + rightReport.testSummary + "

"; + var passTest = rightReport.TestPassedDict.Values; + bool individualTestBool = true; + foreach (bool testResult in passTest) + { + if (testResult == false) + { + individualTestBool = false; + break; + } + } + + string output = "

" + "Test Result:" + "

"; + if (rightReport.passOrFail && individualTestBool) + output += "
" + rightReport.longMsg + "
"; + else + output += "
" + rightReport.longMsg + "
"; + if (rightReport.MessageList.Count > 0) + for (int i = 0; i < rightReport.MessageList.Count; i++) + { + output += "
" + rightReport.MessageList[i] + "
"; + } + + TestDetailLabelResults.Text = output; + } + else + { + TestDetailImage.Visible = false; + TestDetailLabelResults.Text = Session["table"].ToString(); + } + } + } + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/Pages/TestDetailPage.aspx.designer.cs b/XMLValidatorWeb/Pages/TestDetailPage.aspx.designer.cs new file mode 100644 index 0000000..9a86ba2 --- /dev/null +++ b/XMLValidatorWeb/Pages/TestDetailPage.aspx.designer.cs @@ -0,0 +1,51 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by a tool. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + +namespace XMLValidatorWeb { + + + public partial class TestDetail { + + /// + /// TestDetailLabelName control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.Label TestDetailLabelName; + + /// + /// TestDetailLabelOverView control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.Label TestDetailLabelOverView; + + /// + /// TestDetailImage control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.Image TestDetailImage; + + /// + /// TestDetailLabelResults control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.Label TestDetailLabelResults; + } +} diff --git a/XMLValidatorWeb/Pages/TestPage.aspx b/XMLValidatorWeb/Pages/TestPage.aspx new file mode 100644 index 0000000..d38377d --- /dev/null +++ b/XMLValidatorWeb/Pages/TestPage.aspx @@ -0,0 +1,478 @@ +<%@ Page Title="" Language="C#" MasterPageFile="~/MasterPage.Master" AutoEventWireup="true" + CodeBehind="TestPage.aspx.cs" Inherits="XMLValidatorWeb.Pages.TestPage" %> + + + + + + + + + + + + + " rel="stylesheet" /> + + + +
+
+
+ +
+
+
+

+ gbXML Vendor Certification Validator +

+
+
+ + + + + + + + + + + + + +

Select Which gbXML Schema You Wish to Test Against:

+ + +
+

Select Which Test You Want To Run:

+
+ + +
+

Select Your File for Validation:

+
+ +
+ +
+

Test Description

+
+ +
+ +
+ + + + <%--
+ Select Your XML File Here: + + + +
--%> + <%----%> + <%-- + --%> +
+
+ + + +
+
+ +
+
+ +
+ <%-- +
+ --%> +
+
+
+ + + + + +
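In the code-behind that follows, GetResults and GetErrorSummary are exposed as static [WebMethod]s, which suggests the page's (stripped) client script calls them as ASP.NET AJAX page methods. A hedged sketch of such a call over HTTP; the host and path are placeholders, and whether the site actually wires these up through a ScriptManager or a raw JSON POST is an assumption:

using System.Net.Http;
using System.Text;
using System.Threading.Tasks;

class PageMethodCallSketch
{
    // Invokes the static [WebMethod] GetResults using the usual ASP.NET
    // page-method convention: POST to <page>.aspx/<MethodName> with a JSON body.
    static async Task<string> GetResultsAsync()
    {
        using (var client = new HttpClient())
        {
            var body = new StringContent("{}", Encoding.UTF8, "application/json");
            var response = await client.PostAsync(
                "http://localhost/Pages/TestPage.aspx/GetResults", body);  // host/path assumed
            // ASP.NET returns page-method results wrapped in a {"d": ...} envelope.
            return await response.Content.ReadAsStringAsync();
        }
    }
}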
diff --git a/XMLValidatorWeb/Pages/TestPage.aspx.cs b/XMLValidatorWeb/Pages/TestPage.aspx.cs new file mode 100644 index 0000000..57f8840 --- /dev/null +++ b/XMLValidatorWeb/Pages/TestPage.aspx.cs @@ -0,0 +1,382 @@ +using System; +using System.Collections.Generic; +using System.Web.UI; +using System.Web.UI.WebControls; +using System.Text.RegularExpressions; +using DOEgbXML; +using System.Xml; +using System.IO; +using System.Web.Services; +using Newtonsoft.Json.Linq; +using XMLValidatorWeb.SupportFiles; +using System.Web; +using System.Reflection; + +namespace XMLValidatorWeb.Pages +{ + public partial class TestPage : System.Web.UI.Page + { + protected void Page_Load(object sender, EventArgs e) + { + //create testlist for creating dropdown list iteams dynamically + DOEgbXMLTestDetail testList = new DOEgbXMLTestDetail(); + testList.InitializeTestResultStrings(); + + //create dropdownlist items base on the tests + if (DropDownList1 != null) + { + string selectedValue = DropDownList1.SelectedValue; + + //clear all iteam + DropDownList1.Items.Clear(); + + foreach (DOEgbXMLTestDetail detail in testList.TestDetailList) + //if test is the one selected before select it + if (detail.testName == selectedValue) + { + DropDownList1.Items.Add(new ListItem(detail.testName, detail.testName, true)); + DropDownList1.SelectedValue = selectedValue; + } + else + DropDownList1.Items.Add(new ListItem(detail.testName, detail.testName)); + } + + if (DropDownList2 != null) + { + string selectedValue = DropDownList2.SelectedValue; + + //clear all iteam + DropDownList2.Items.Clear(); + //get all of the available XSDs + var root = AppDomain.CurrentDomain.BaseDirectory; + var path = root + "SupportFiles//XSD"; + string[] files = Directory.GetFiles(path, "*.xsd"); + for(int i = 0; i < files.Length; i++) + files[i] = Path.GetFileName(files[i]); + + + foreach (string file in files) + //if test is the one selected before select it + if (file == selectedValue) + { + DropDownList2.Items.Add(new ListItem(file, file, true)); + DropDownList2.SelectedValue = selectedValue; + } + else + DropDownList2.Items.Add(new ListItem(file, file)); + } + + if (TestSummuryLabel != null) + { + //show the test summary of the selected test + foreach (DOEgbXMLTestDetail detail in testList.TestDetailList) + if (detail.testName == DropDownList1.SelectedValue) + { + TestSummuryLabel.Text = detail.testSummary; + break; + } + } + } + + [WebMethod] + public static CampusReport UploadTest(HttpContext context) + { + var r = context; + CampusReport c = new CampusReport(); + //if there is a file + //valadate it by pass in input stream as xmlreader + + return c; + } + + [WebMethod] + public static string GetResults() + { + //open log file and save as a string + try + { + var root = AppDomain.CurrentDomain.BaseDirectory; + string path = root+"log-file.txt"; + string logblog = File.ReadAllText(path); + int len = logblog.Length; + return logblog; + } + catch(Exception e) + { + return "Error: We're sorry, we could not read this test's log file. 
Please contact gbxml.org to report this problem."; + } + } + + [WebMethod] + public static string GetErrorSummary() + { + try + { + var root = AppDomain.CurrentDomain.BaseDirectory; + string readpath = root + "log-file.txt"; + string writepath = root + "log-file-summary.txt"; + + List tempStrings = new List(); + bool write = false; + bool errors = false; + int startIndex = 0; + int linecount = 0; + foreach (string line in File.ReadLines(readpath)) + { + Match m = Regex.Match(line, "START:"); + if (m.Success) + { + startIndex = tempStrings.Count == 0 ? 0 : tempStrings.Count; //designed to keep the list short + write = true; + linecount = 0; + } + if (write) + { + tempStrings.Add(line); + linecount++; + } + Match errormatch = Regex.Match(line, "TEST FILE FAILURE|FATAL"); //removed MATCH FAILED as this is only a failure of SurfaceMatch subtests, not the whole test + if (errormatch.Success) + { + errors = true; + } + Match mend = Regex.Match(line, "END:"); + if (mend.Success) + { + if(!errors) + { + tempStrings.RemoveRange(startIndex, linecount); + } + write = false; + errors = false; + } + + } + + if(tempStrings.Count > 0) + { + using (StreamWriter writetext = new StreamWriter(writepath)) + { + foreach (string line in tempStrings) + { + writetext.WriteLine(line); + } + } + + readpath = writepath; + string logsummary = File.ReadAllText(readpath); + return logsummary; + } + else + { + return "Note for user. There are no errors to report!"; + } + + + } + catch(Exception e) + { + return "Error: We're sorry, we could not read this test's log file. Please contact gbxml.org to report this problem."; + } + } + + //protected void upLoadButton_Click1(object sender, EventArgs e) + //{ + // if (FileUpload1.HasFile) + // { + // if (FileUpload1.PostedFile.ContentType == "text/xml") + // { + // //if there is a file + // //valadate it by pass in input stream as xmlreader + // Stream responseStream = FileUpload1.PostedFile.InputStream; + // XmlReader xmlreader = XmlReader.Create(responseStream); + + + // //initialize instances for testing + // DOEgbXMLValidator val = new DOEgbXMLValidator(); + // XMLParser parser = new XMLParser(); + // //if it is not valid + // if (!val.IsValidXmlEx(xmlreader) || val.nErrors > 0 || val.nWarnings > 0) + // { + // if (PrintFriendlyButton != null) + // PrintFriendlyButton.Visible = false; + + // if (DownloadLogButton != null) + // DownloadLogButton.Visible = false; + + + // //setup errorlog + // string errorLog = ""; + // string errorDes = ""; + // if (val.nErrors > 0 || val.nWarnings > 0) + // { + // errorLog += "

" + "Find " + val.nErrors + " Errors and " + val.nWarnings + " Warnings

" + val.Errors + "

"; + // errorDes = "Find "; + // if (val.nErrors > 0) + // { + // errorDes += val.nErrors; + // if (val.nWarnings > 0) + // errorDes += " Errors and"; + // else + // errorDes += " Errors"; + + // } + // if (val.nWarnings > 0) + // errorDes += val.nWarnings + " Warnings"; + // } + // else + // { + // errorLog += "

" + "Your XML File is severely deficient structurally. It may be missing element tags or is not valid XML. The test has failed.

" + val.BigError + "
" + "

"; + // errorDes = "Your XML File is severely deficient structurally."; + // } + // // Session.Add("table", errorLog); + // Session["table"] = errorLog; + + // TestResultLabel.Text = ""; + + // ResultSummaryLabel.Text = "

Result Summary

"; + // ResultSummaryLabel.Text += "
"; + // ResultSummaryLabel.Text += "" + + // "" + + // "" + + // "" + + // "" + + // ""; + // ResultSummaryLabel.Text += "
" + "gbXML schema Test" + "" + errorDes + "" + "Fail" + "" + "" + "More Detail" + "" + "

"; + // } + // //if it is valid + // else if (val.nErrors == 0 && val.nWarnings == 0) + // { + // //run test + + // responseStream.Position = 0; + // XmlReader xmlreader2 = XmlReader.Create(responseStream); + // //start test + // parser.StartTest(xmlreader2, DropDownList1.SelectedValue, Page.User.Identity.Name); + + // string results = parser.browserjson; + // results = results.Replace(@"\", @"\\"); + // ClientScriptManager cs = Page.ClientScript; + // cs.RegisterStartupScript(GetType(), "Startup", "init('" + results + "');", true); + // //show summary table + // ResultSummaryLabel.Text = parser.summaryTable; + + // //show test section table + // TestResultLabel.Text = parser.table; + + // //store reportlist in session + // Session["reportList"] = parser.ReportList; + + + // LogLabel.Text = parser.log; + // TableLabel.Text = parser.table; + // //remove extra tag + // //TableLabel.Text = TableLabel.Text.Replace("", ""); + // //TableLabel.Text = TableLabel.Text.Replace("", ""); + // //TableLabel.Text = TableLabel.Text.Replace("", "
"); + // //DownloadLogButton.Visible = true; + // //PrintFriendlyButton.Visible = true; + // } + // //this should never happens + // else + // { + // ResultSummaryLabel.Text = "?????????something is very wrong"; + // TestResultLabel.Text = ""; + // } + + // } + // //if the file type is not xml + // else + // { + // if (PrintFriendlyButton != null) + // PrintFriendlyButton.Visible = false; + + // if (DownloadLogButton != null) + // DownloadLogButton.Visible = false; + + // ResultSummaryLabel.Text = ""; + // TestResultLabel.Text = ""; + + // ResultSummaryLabel.Text = "

Result Summary

"; + // ResultSummaryLabel.Text += "
"; + // ResultSummaryLabel.Text += "" + + // "" + + // "" + + // "" + + + // ""; + // ResultSummaryLabel.Text += "
" + "gbXML schema Test" + "" + "You have not specified a right type of file." + "" + "Fail" + "

"; + // } + // } + // //if there is no file + // else + // { + // if (PrintFriendlyButton != null) + // PrintFriendlyButton.Visible = false; + + // if (DownloadLogButton != null) + // DownloadLogButton.Visible = false; + + + // ResultSummaryLabel.Text = ""; + // TestResultLabel.Text = ""; + + // ResultSummaryLabel.Text = "

Result Summary

"; + // ResultSummaryLabel.Text += ""; + // ResultSummaryLabel.Text += "" + + // "" + + // "" + + // "" + + // ""; + // ResultSummaryLabel.Text += "
" + "gbXML schema Test" + "" + "You have not specified a file." + "" + "Fail" + "

"; + + // } + //} + + + protected void DownloadLogButton_Click(object sender, EventArgs e) + { + Response.Clear(); + Response.ClearContent(); + Response.ClearHeaders(); + Response.AddHeader("content-disposition", "attachment;filename=Log.txt"); + Response.ContentType = "text/plain"; + Response.Write(LogLabel.Text); + Response.End(); + } + + protected void PrintFriendlyButton_Click(object sender, EventArgs e) + { + Session.Add("table", TableLabel.Text); + + string url = "PrintFriendlyTablePage.aspx"; + + ClientScript.RegisterStartupScript(this.GetType(), "OpenWindow", ""); + } + + //TODO: REMOVE? + protected void DropDownList1_SelectedIndexChanged(object sender, EventArgs e) + { + //if change the selected index clear all labels + //ResultSummaryLabel.Text = ""; + //TestResultLabel.Text = ""; + //LogLabel.Text = ""; + //TableLabel.Text = ""; + //DownloadLogButton.Visible = false; + //PrintFriendlyButton.Visible = false; + } + + //TODO: REMOVE? + protected void DropDownList2_SelectedIndexChanged(object sender, EventArgs e) + { + //if change the selected index clear all labels + //ResultSummaryLabel.Text = ""; + //TestResultLabel.Text = ""; + //LogLabel.Text = ""; + //TableLabel.Text = ""; + //DownloadLogButton.Visible = false; + //PrintFriendlyButton.Visible = false; + } + + } +} \ No newline at end of file diff --git a/XMLValidatorWeb/Pages/TestPage.aspx.designer.cs b/XMLValidatorWeb/Pages/TestPage.aspx.designer.cs new file mode 100644 index 0000000..9a46972 --- /dev/null +++ b/XMLValidatorWeb/Pages/TestPage.aspx.designer.cs @@ -0,0 +1,60 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by a tool. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + +namespace XMLValidatorWeb.Pages { + + + public partial class TestPage { + + /// + /// DropDownList2 control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.DropDownList DropDownList2; + + /// + /// DropDownList1 control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.DropDownList DropDownList1; + + /// + /// TestSummuryLabel control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.Label TestSummuryLabel; + + /// + /// LogLabel control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.Label LogLabel; + + /// + /// TableLabel control. + /// + /// + /// Auto-generated field. + /// To modify move field declaration from designer file to code-behind file. + /// + protected global::System.Web.UI.WebControls.Label TableLabel; + } +} diff --git a/XMLValidatorWeb/Properties/AssemblyInfo.cs b/XMLValidatorWeb/Properties/AssemblyInfo.cs new file mode 100644 index 0000000..5e3cdcc --- /dev/null +++ b/XMLValidatorWeb/Properties/AssemblyInfo.cs @@ -0,0 +1,35 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. 
Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("XMLValidatorWeb")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("XMLValidatorWeb")] +[assembly: AssemblyCopyright("Copyright © 2012")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("64367132-ef23-4db1-832c-5a2800ab3d85")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Revision and Build Numbers +// by using the '*' as shown below: +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/XMLValidatorWeb/Scripts/FileSaver.js b/XMLValidatorWeb/Scripts/FileSaver.js new file mode 100644 index 0000000..239db12 --- /dev/null +++ b/XMLValidatorWeb/Scripts/FileSaver.js @@ -0,0 +1,188 @@ +/* FileSaver.js + * A saveAs() FileSaver implementation. + * 1.3.2 + * 2016-06-16 18:25:19 + * + * By Eli Grey, http://eligrey.com + * License: MIT + * See https://github.com/eligrey/FileSaver.js/blob/master/LICENSE.md + */ + +/*global self */ +/*jslint bitwise: true, indent: 4, laxbreak: true, laxcomma: true, smarttabs: true, plusplus: true */ + +/*! @source http://purl.eligrey.com/github/FileSaver.js/blob/master/FileSaver.js */ + +var saveAs = saveAs || (function(view) { + "use strict"; + // IE <10 is explicitly unsupported + if (typeof view === "undefined" || typeof navigator !== "undefined" && /MSIE [1-9]\./.test(navigator.userAgent)) { + return; + } + var + doc = view.document + // only get URL when necessary in case Blob.js hasn't overridden it yet + , get_URL = function() { + return view.URL || view.webkitURL || view; + } + , save_link = doc.createElementNS("http://www.w3.org/1999/xhtml", "a") + , can_use_save_link = "download" in save_link + , click = function(node) { + var event = new MouseEvent("click"); + node.dispatchEvent(event); + } + , is_safari = /constructor/i.test(view.HTMLElement) + , is_chrome_ios =/CriOS\/[\d]+/.test(navigator.userAgent) + , throw_outside = function(ex) { + (view.setImmediate || view.setTimeout)(function() { + throw ex; + }, 0); + } + , force_saveable_type = "application/octet-stream" + // the Blob API is fundamentally broken as there is no "downloadfinished" event to subscribe to + , arbitrary_revoke_timeout = 1000 * 40 // in ms + , revoke = function(file) { + var revoker = function() { + if (typeof file === "string") { // file is an object URL + get_URL().revokeObjectURL(file); + } else { // file is a File + file.remove(); + } + }; + setTimeout(revoker, arbitrary_revoke_timeout); + } + , dispatch = function(filesaver, event_types, event) { + event_types = [].concat(event_types); + var i = event_types.length; + while (i--) { + var listener = filesaver["on" + event_types[i]]; + if (typeof listener === "function") { + try { + listener.call(filesaver, event || filesaver); + } catch (ex) { + throw_outside(ex); + } + } + } + } + , auto_bom = function(blob) { + // prepend BOM for UTF-8 XML and text/* types 
(including HTML) + // note: your browser will automatically convert UTF-16 U+FEFF to EF BB BF + if (/^\s*(?:text\/\S*|application\/xml|\S*\/\S*\+xml)\s*;.*charset\s*=\s*utf-8/i.test(blob.type)) { + return new Blob([String.fromCharCode(0xFEFF), blob], {type: blob.type}); + } + return blob; + } + , FileSaver = function(blob, name, no_auto_bom) { + if (!no_auto_bom) { + blob = auto_bom(blob); + } + // First try a.download, then web filesystem, then object URLs + var + filesaver = this + , type = blob.type + , force = type === force_saveable_type + , object_url + , dispatch_all = function() { + dispatch(filesaver, "writestart progress write writeend".split(" ")); + } + // on any filesys errors revert to saving with object URLs + , fs_error = function() { + if ((is_chrome_ios || (force && is_safari)) && view.FileReader) { + // Safari doesn't allow downloading of blob urls + var reader = new FileReader(); + reader.onloadend = function() { + var url = is_chrome_ios ? reader.result : reader.result.replace(/^data:[^;]*;/, 'data:attachment/file;'); + var popup = view.open(url, '_blank'); + if(!popup) view.location.href = url; + url=undefined; // release reference before dispatching + filesaver.readyState = filesaver.DONE; + dispatch_all(); + }; + reader.readAsDataURL(blob); + filesaver.readyState = filesaver.INIT; + return; + } + // don't create more object URLs than needed + if (!object_url) { + object_url = get_URL().createObjectURL(blob); + } + if (force) { + view.location.href = object_url; + } else { + var opened = view.open(object_url, "_blank"); + if (!opened) { + // Apple does not allow window.open, see https://developer.apple.com/library/safari/documentation/Tools/Conceptual/SafariExtensionGuide/WorkingwithWindowsandTabs/WorkingwithWindowsandTabs.html + view.location.href = object_url; + } + } + filesaver.readyState = filesaver.DONE; + dispatch_all(); + revoke(object_url); + } + ; + filesaver.readyState = filesaver.INIT; + + if (can_use_save_link) { + object_url = get_URL().createObjectURL(blob); + setTimeout(function() { + save_link.href = object_url; + save_link.download = name; + click(save_link); + dispatch_all(); + revoke(object_url); + filesaver.readyState = filesaver.DONE; + }); + return; + } + + fs_error(); + } + , FS_proto = FileSaver.prototype + , saveAs = function(blob, name, no_auto_bom) { + return new FileSaver(blob, name || blob.name || "download", no_auto_bom); + } + ; + // IE 10+ (native saveAs) + if (typeof navigator !== "undefined" && navigator.msSaveOrOpenBlob) { + return function(blob, name, no_auto_bom) { + name = name || blob.name || "download"; + + if (!no_auto_bom) { + blob = auto_bom(blob); + } + return navigator.msSaveOrOpenBlob(blob, name); + }; + } + + FS_proto.abort = function(){}; + FS_proto.readyState = FS_proto.INIT = 0; + FS_proto.WRITING = 1; + FS_proto.DONE = 2; + + FS_proto.error = + FS_proto.onwritestart = + FS_proto.onprogress = + FS_proto.onwrite = + FS_proto.onabort = + FS_proto.onerror = + FS_proto.onwriteend = + null; + + return saveAs; +}( + typeof self !== "undefined" && self + || typeof window !== "undefined" && window + || this.content +)); +// `self` is undefined in Firefox for Android content script context +// while `this` is nsIContentFrameMessageManager +// with an attribute `content` that corresponds to the window + +if (typeof module !== "undefined" && module.exports) { + module.exports.saveAs = saveAs; +} else if ((typeof define !== "undefined" && define !== null) && (define.amd !== null)) { + define([], function() { + return saveAs; + 
}); +} diff --git a/XMLValidatorWeb/Scripts/jquery-ui.js b/XMLValidatorWeb/Scripts/jquery-ui.js new file mode 100644 index 0000000..fb2d3c7 --- /dev/null +++ b/XMLValidatorWeb/Scripts/jquery-ui.js @@ -0,0 +1,18686 @@ +/*! jQuery UI - v1.12.0 - 2016-07-18 +* http://jqueryui.com +* Includes: widget.js, position.js, data.js, disable-selection.js, focusable.js, form-reset-mixin.js, jquery-1-7.js, keycode.js, labels.js, scroll-parent.js, tabbable.js, unique-id.js, widgets/draggable.js, widgets/droppable.js, widgets/resizable.js, widgets/selectable.js, widgets/sortable.js, widgets/accordion.js, widgets/autocomplete.js, widgets/button.js, widgets/checkboxradio.js, widgets/controlgroup.js, widgets/datepicker.js, widgets/dialog.js, widgets/menu.js, widgets/mouse.js, widgets/progressbar.js, widgets/selectmenu.js, widgets/slider.js, widgets/spinner.js, widgets/tabs.js, widgets/tooltip.js, effect.js, effects/effect-blind.js, effects/effect-bounce.js, effects/effect-clip.js, effects/effect-drop.js, effects/effect-explode.js, effects/effect-fade.js, effects/effect-fold.js, effects/effect-highlight.js, effects/effect-puff.js, effects/effect-pulsate.js, effects/effect-scale.js, effects/effect-shake.js, effects/effect-size.js, effects/effect-slide.js, effects/effect-transfer.js +* Copyright jQuery Foundation and other contributors; Licensed MIT */ + +(function( factory ) { + if ( typeof define === "function" && define.amd ) { + + // AMD. Register as an anonymous module. + define([ "jquery" ], factory ); + } else { + + // Browser globals + factory( jQuery ); + } +}(function( $ ) { + +$.ui = $.ui || {}; + +var version = $.ui.version = "1.12.0"; + + +/*! + * jQuery UI Widget 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ + +//>>label: Widget +//>>group: Core +//>>description: Provides a factory for creating stateful widgets with a common API. +//>>docs: http://api.jqueryui.com/jQuery.widget/ +//>>demos: http://jqueryui.com/widget/ + + + +var widgetUuid = 0; +var widgetSlice = Array.prototype.slice; + +$.cleanData = ( function( orig ) { + return function( elems ) { + var events, elem, i; + for ( i = 0; ( elem = elems[ i ] ) != null; i++ ) { + try { + + // Only trigger remove when necessary to save time + events = $._data( elem, "events" ); + if ( events && events.remove ) { + $( elem ).triggerHandler( "remove" ); + } + + // Http://bugs.jquery.com/ticket/8235 + } catch ( e ) {} + } + orig( elems ); + }; +} )( $.cleanData ); + +$.widget = function( name, base, prototype ) { + var existingConstructor, constructor, basePrototype; + + // ProxiedPrototype allows the provided prototype to remain unmodified + // so that it can be used as a mixin for multiple widgets (#8876) + var proxiedPrototype = {}; + + var namespace = name.split( "." )[ 0 ]; + name = name.split( "." 
)[ 1 ]; + var fullName = namespace + "-" + name; + + if ( !prototype ) { + prototype = base; + base = $.Widget; + } + + if ( $.isArray( prototype ) ) { + prototype = $.extend.apply( null, [ {} ].concat( prototype ) ); + } + + // Create selector for plugin + $.expr[ ":" ][ fullName.toLowerCase() ] = function( elem ) { + return !!$.data( elem, fullName ); + }; + + $[ namespace ] = $[ namespace ] || {}; + existingConstructor = $[ namespace ][ name ]; + constructor = $[ namespace ][ name ] = function( options, element ) { + + // Allow instantiation without "new" keyword + if ( !this._createWidget ) { + return new constructor( options, element ); + } + + // Allow instantiation without initializing for simple inheritance + // must use "new" keyword (the code above always passes args) + if ( arguments.length ) { + this._createWidget( options, element ); + } + }; + + // Extend with the existing constructor to carry over any static properties + $.extend( constructor, existingConstructor, { + version: prototype.version, + + // Copy the object used to create the prototype in case we need to + // redefine the widget later + _proto: $.extend( {}, prototype ), + + // Track widgets that inherit from this widget in case this widget is + // redefined after a widget inherits from it + _childConstructors: [] + } ); + + basePrototype = new base(); + + // We need to make the options hash a property directly on the new instance + // otherwise we'll modify the options hash on the prototype that we're + // inheriting from + basePrototype.options = $.widget.extend( {}, basePrototype.options ); + $.each( prototype, function( prop, value ) { + if ( !$.isFunction( value ) ) { + proxiedPrototype[ prop ] = value; + return; + } + proxiedPrototype[ prop ] = ( function() { + function _super() { + return base.prototype[ prop ].apply( this, arguments ); + } + + function _superApply( args ) { + return base.prototype[ prop ].apply( this, args ); + } + + return function() { + var __super = this._super; + var __superApply = this._superApply; + var returnValue; + + this._super = _super; + this._superApply = _superApply; + + returnValue = value.apply( this, arguments ); + + this._super = __super; + this._superApply = __superApply; + + return returnValue; + }; + } )(); + } ); + constructor.prototype = $.widget.extend( basePrototype, { + + // TODO: remove support for widgetEventPrefix + // always use the name + a colon as the prefix, e.g., draggable:start + // don't prefix for widgets that aren't DOM-based + widgetEventPrefix: existingConstructor ? ( basePrototype.widgetEventPrefix || name ) : name + }, proxiedPrototype, { + constructor: constructor, + namespace: namespace, + widgetName: name, + widgetFullName: fullName + } ); + + // If this widget is being redefined then we need to find all widgets that + // are inheriting from it and redefine all of them so that they inherit from + // the new version of this widget. We're essentially trying to replace one + // level in the prototype chain. + if ( existingConstructor ) { + $.each( existingConstructor._childConstructors, function( i, child ) { + var childPrototype = child.prototype; + + // Redefine the child widget using the same prototype that was + // originally used, but inherit from the new version of the base + $.widget( childPrototype.namespace + "." 
+ childPrototype.widgetName, constructor, + child._proto ); + } ); + + // Remove the list of existing child constructors from the old constructor + // so the old child constructors can be garbage collected + delete existingConstructor._childConstructors; + } else { + base._childConstructors.push( constructor ); + } + + $.widget.bridge( name, constructor ); + + return constructor; +}; + +$.widget.extend = function( target ) { + var input = widgetSlice.call( arguments, 1 ); + var inputIndex = 0; + var inputLength = input.length; + var key; + var value; + + for ( ; inputIndex < inputLength; inputIndex++ ) { + for ( key in input[ inputIndex ] ) { + value = input[ inputIndex ][ key ]; + if ( input[ inputIndex ].hasOwnProperty( key ) && value !== undefined ) { + + // Clone objects + if ( $.isPlainObject( value ) ) { + target[ key ] = $.isPlainObject( target[ key ] ) ? + $.widget.extend( {}, target[ key ], value ) : + + // Don't extend strings, arrays, etc. with objects + $.widget.extend( {}, value ); + + // Copy everything else by reference + } else { + target[ key ] = value; + } + } + } + } + return target; +}; + +$.widget.bridge = function( name, object ) { + var fullName = object.prototype.widgetFullName || name; + $.fn[ name ] = function( options ) { + var isMethodCall = typeof options === "string"; + var args = widgetSlice.call( arguments, 1 ); + var returnValue = this; + + if ( isMethodCall ) { + this.each( function() { + var methodValue; + var instance = $.data( this, fullName ); + + if ( options === "instance" ) { + returnValue = instance; + return false; + } + + if ( !instance ) { + return $.error( "cannot call methods on " + name + + " prior to initialization; " + + "attempted to call method '" + options + "'" ); + } + + if ( !$.isFunction( instance[ options ] ) || options.charAt( 0 ) === "_" ) { + return $.error( "no such method '" + options + "' for " + name + + " widget instance" ); + } + + methodValue = instance[ options ].apply( instance, args ); + + if ( methodValue !== instance && methodValue !== undefined ) { + returnValue = methodValue && methodValue.jquery ? + returnValue.pushStack( methodValue.get() ) : + methodValue; + return false; + } + } ); + } else { + + // Allow multiple hashes to be passed on init + if ( args.length ) { + options = $.widget.extend.apply( null, [ options ].concat( args ) ); + } + + this.each( function() { + var instance = $.data( this, fullName ); + if ( instance ) { + instance.option( options || {} ); + if ( instance._init ) { + instance._init(); + } + } else { + $.data( this, fullName, new object( options, this ) ); + } + } ); + } + + return returnValue; + }; +}; + +$.Widget = function( /* options, element */ ) {}; +$.Widget._childConstructors = []; + +$.Widget.prototype = { + widgetName: "widget", + widgetEventPrefix: "", + defaultElement: "
", + + options: { + classes: {}, + disabled: false, + + // Callbacks + create: null + }, + + _createWidget: function( options, element ) { + element = $( element || this.defaultElement || this )[ 0 ]; + this.element = $( element ); + this.uuid = widgetUuid++; + this.eventNamespace = "." + this.widgetName + this.uuid; + + this.bindings = $(); + this.hoverable = $(); + this.focusable = $(); + this.classesElementLookup = {}; + + if ( element !== this ) { + $.data( element, this.widgetFullName, this ); + this._on( true, this.element, { + remove: function( event ) { + if ( event.target === element ) { + this.destroy(); + } + } + } ); + this.document = $( element.style ? + + // Element within the document + element.ownerDocument : + + // Element is window or document + element.document || element ); + this.window = $( this.document[ 0 ].defaultView || this.document[ 0 ].parentWindow ); + } + + this.options = $.widget.extend( {}, + this.options, + this._getCreateOptions(), + options ); + + this._create(); + + if ( this.options.disabled ) { + this._setOptionDisabled( this.options.disabled ); + } + + this._trigger( "create", null, this._getCreateEventData() ); + this._init(); + }, + + _getCreateOptions: function() { + return {}; + }, + + _getCreateEventData: $.noop, + + _create: $.noop, + + _init: $.noop, + + destroy: function() { + var that = this; + + this._destroy(); + $.each( this.classesElementLookup, function( key, value ) { + that._removeClass( value, key ); + } ); + + // We can probably remove the unbind calls in 2.0 + // all event bindings should go through this._on() + this.element + .off( this.eventNamespace ) + .removeData( this.widgetFullName ); + this.widget() + .off( this.eventNamespace ) + .removeAttr( "aria-disabled" ); + + // Clean up events and states + this.bindings.off( this.eventNamespace ); + }, + + _destroy: $.noop, + + widget: function() { + return this.element; + }, + + option: function( key, value ) { + var options = key; + var parts; + var curOption; + var i; + + if ( arguments.length === 0 ) { + + // Don't return a reference to the internal hash + return $.widget.extend( {}, this.options ); + } + + if ( typeof key === "string" ) { + + // Handle nested keys, e.g., "foo.bar" => { foo: { bar: ___ } } + options = {}; + parts = key.split( "." ); + key = parts.shift(); + if ( parts.length ) { + curOption = options[ key ] = $.widget.extend( {}, this.options[ key ] ); + for ( i = 0; i < parts.length - 1; i++ ) { + curOption[ parts[ i ] ] = curOption[ parts[ i ] ] || {}; + curOption = curOption[ parts[ i ] ]; + } + key = parts.pop(); + if ( arguments.length === 1 ) { + return curOption[ key ] === undefined ? null : curOption[ key ]; + } + curOption[ key ] = value; + } else { + if ( arguments.length === 1 ) { + return this.options[ key ] === undefined ? 
null : this.options[ key ]; + } + options[ key ] = value; + } + } + + this._setOptions( options ); + + return this; + }, + + _setOptions: function( options ) { + var key; + + for ( key in options ) { + this._setOption( key, options[ key ] ); + } + + return this; + }, + + _setOption: function( key, value ) { + if ( key === "classes" ) { + this._setOptionClasses( value ); + } + + this.options[ key ] = value; + + if ( key === "disabled" ) { + this._setOptionDisabled( value ); + } + + return this; + }, + + _setOptionClasses: function( value ) { + var classKey, elements, currentElements; + + for ( classKey in value ) { + currentElements = this.classesElementLookup[ classKey ]; + if ( value[ classKey ] === this.options.classes[ classKey ] || + !currentElements || + !currentElements.length ) { + continue; + } + + // We are doing this to create a new jQuery object because the _removeClass() call + // on the next line is going to destroy the reference to the current elements being + // tracked. We need to save a copy of this collection so that we can add the new classes + // below. + elements = $( currentElements.get() ); + this._removeClass( currentElements, classKey ); + + // We don't use _addClass() here, because that uses this.options.classes + // for generating the string of classes. We want to use the value passed in from + // _setOption(), this is the new value of the classes option which was passed to + // _setOption(). We pass this value directly to _classes(). + elements.addClass( this._classes( { + element: elements, + keys: classKey, + classes: value, + add: true + } ) ); + } + }, + + _setOptionDisabled: function( value ) { + this._toggleClass( this.widget(), this.widgetFullName + "-disabled", null, !!value ); + + // If the widget is becoming disabled, then nothing is interactive + if ( value ) { + this._removeClass( this.hoverable, null, "ui-state-hover" ); + this._removeClass( this.focusable, null, "ui-state-focus" ); + } + }, + + enable: function() { + return this._setOptions( { disabled: false } ); + }, + + disable: function() { + return this._setOptions( { disabled: true } ); + }, + + _classes: function( options ) { + var full = []; + var that = this; + + options = $.extend( { + element: this.element, + classes: this.options.classes || {} + }, options ); + + function processClassString( classes, checkOption ) { + var current, i; + for ( i = 0; i < classes.length; i++ ) { + current = that.classesElementLookup[ classes[ i ] ] || $(); + if ( options.add ) { + current = $( $.unique( current.get().concat( options.element.get() ) ) ); + } else { + current = $( current.not( options.element ).get() ); + } + that.classesElementLookup[ classes[ i ] ] = current; + full.push( classes[ i ] ); + if ( checkOption && options.classes[ classes[ i ] ] ) { + full.push( options.classes[ classes[ i ] ] ); + } + } + } + + if ( options.keys ) { + processClassString( options.keys.match( /\S+/g ) || [], true ); + } + if ( options.extra ) { + processClassString( options.extra.match( /\S+/g ) || [] ); + } + + return full.join( " " ); + }, + + _removeClass: function( element, keys, extra ) { + return this._toggleClass( element, keys, extra, false ); + }, + + _addClass: function( element, keys, extra ) { + return this._toggleClass( element, keys, extra, true ); + }, + + _toggleClass: function( element, keys, extra, add ) { + add = ( typeof add === "boolean" ) ? add : extra; + var shift = ( typeof element === "string" || element === null ), + options = { + extra: shift ? keys : extra, + keys: shift ? 
element : keys, + element: shift ? this.element : element, + add: add + }; + options.element.toggleClass( this._classes( options ), add ); + return this; + }, + + _on: function( suppressDisabledCheck, element, handlers ) { + var delegateElement; + var instance = this; + + // No suppressDisabledCheck flag, shuffle arguments + if ( typeof suppressDisabledCheck !== "boolean" ) { + handlers = element; + element = suppressDisabledCheck; + suppressDisabledCheck = false; + } + + // No element argument, shuffle and use this.element + if ( !handlers ) { + handlers = element; + element = this.element; + delegateElement = this.widget(); + } else { + element = delegateElement = $( element ); + this.bindings = this.bindings.add( element ); + } + + $.each( handlers, function( event, handler ) { + function handlerProxy() { + + // Allow widgets to customize the disabled handling + // - disabled as an array instead of boolean + // - disabled class as method for disabling individual parts + if ( !suppressDisabledCheck && + ( instance.options.disabled === true || + $( this ).hasClass( "ui-state-disabled" ) ) ) { + return; + } + return ( typeof handler === "string" ? instance[ handler ] : handler ) + .apply( instance, arguments ); + } + + // Copy the guid so direct unbinding works + if ( typeof handler !== "string" ) { + handlerProxy.guid = handler.guid = + handler.guid || handlerProxy.guid || $.guid++; + } + + var match = event.match( /^([\w:-]*)\s*(.*)$/ ); + var eventName = match[ 1 ] + instance.eventNamespace; + var selector = match[ 2 ]; + + if ( selector ) { + delegateElement.on( eventName, selector, handlerProxy ); + } else { + element.on( eventName, handlerProxy ); + } + } ); + }, + + _off: function( element, eventName ) { + eventName = ( eventName || "" ).split( " " ).join( this.eventNamespace + " " ) + + this.eventNamespace; + element.off( eventName ).off( eventName ); + + // Clear the stack to avoid memory leaks (#10056) + this.bindings = $( this.bindings.not( element ).get() ); + this.focusable = $( this.focusable.not( element ).get() ); + this.hoverable = $( this.hoverable.not( element ).get() ); + }, + + _delay: function( handler, delay ) { + function handlerProxy() { + return ( typeof handler === "string" ? instance[ handler ] : handler ) + .apply( instance, arguments ); + } + var instance = this; + return setTimeout( handlerProxy, delay || 0 ); + }, + + _hoverable: function( element ) { + this.hoverable = this.hoverable.add( element ); + this._on( element, { + mouseenter: function( event ) { + this._addClass( $( event.currentTarget ), null, "ui-state-hover" ); + }, + mouseleave: function( event ) { + this._removeClass( $( event.currentTarget ), null, "ui-state-hover" ); + } + } ); + }, + + _focusable: function( element ) { + this.focusable = this.focusable.add( element ); + this._on( element, { + focusin: function( event ) { + this._addClass( $( event.currentTarget ), null, "ui-state-focus" ); + }, + focusout: function( event ) { + this._removeClass( $( event.currentTarget ), null, "ui-state-focus" ); + } + } ); + }, + + _trigger: function( type, event, data ) { + var prop, orig; + var callback = this.options[ type ]; + + data = data || {}; + event = $.Event( event ); + event.type = ( type === this.widgetEventPrefix ? 
+ type : + this.widgetEventPrefix + type ).toLowerCase(); + + // The original event may come from any element + // so we need to reset the target on the new event + event.target = this.element[ 0 ]; + + // Copy original event properties over to the new event + orig = event.originalEvent; + if ( orig ) { + for ( prop in orig ) { + if ( !( prop in event ) ) { + event[ prop ] = orig[ prop ]; + } + } + } + + this.element.trigger( event, data ); + return !( $.isFunction( callback ) && + callback.apply( this.element[ 0 ], [ event ].concat( data ) ) === false || + event.isDefaultPrevented() ); + } +}; + +$.each( { show: "fadeIn", hide: "fadeOut" }, function( method, defaultEffect ) { + $.Widget.prototype[ "_" + method ] = function( element, options, callback ) { + if ( typeof options === "string" ) { + options = { effect: options }; + } + + var hasOptions; + var effectName = !options ? + method : + options === true || typeof options === "number" ? + defaultEffect : + options.effect || defaultEffect; + + options = options || {}; + if ( typeof options === "number" ) { + options = { duration: options }; + } + + hasOptions = !$.isEmptyObject( options ); + options.complete = callback; + + if ( options.delay ) { + element.delay( options.delay ); + } + + if ( hasOptions && $.effects && $.effects.effect[ effectName ] ) { + element[ method ]( options ); + } else if ( effectName !== method && element[ effectName ] ) { + element[ effectName ]( options.duration, options.easing, callback ); + } else { + element.queue( function( next ) { + $( this )[ method ](); + if ( callback ) { + callback.call( element[ 0 ] ); + } + next(); + } ); + } + }; +} ); + +var widget = $.widget; + + +/*! + * jQuery UI Position 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + * + * http://api.jqueryui.com/position/ + */ + +//>>label: Position +//>>group: Core +//>>description: Positions elements relative to other elements. +//>>docs: http://api.jqueryui.com/position/ +//>>demos: http://jqueryui.com/position/ + + +( function() { +var cachedScrollbarWidth, supportsOffsetFractions, + max = Math.max, + abs = Math.abs, + round = Math.round, + rhorizontal = /left|center|right/, + rvertical = /top|center|bottom/, + roffset = /[\+\-]\d+(\.[\d]+)?%?/, + rposition = /^\w+/, + rpercent = /%$/, + _position = $.fn.position; + +// Support: IE <=9 only +supportsOffsetFractions = function() { + var element = $( "
" ) + .css( "position", "absolute" ) + .appendTo( "body" ) + .offset( { + top: 1.5, + left: 1.5 + } ), + support = element.offset().top === 1.5; + + element.remove(); + + supportsOffsetFractions = function() { + return support; + }; + + return support; +}; + +function getOffsets( offsets, width, height ) { + return [ + parseFloat( offsets[ 0 ] ) * ( rpercent.test( offsets[ 0 ] ) ? width / 100 : 1 ), + parseFloat( offsets[ 1 ] ) * ( rpercent.test( offsets[ 1 ] ) ? height / 100 : 1 ) + ]; +} + +function parseCss( element, property ) { + return parseInt( $.css( element, property ), 10 ) || 0; +} + +function getDimensions( elem ) { + var raw = elem[ 0 ]; + if ( raw.nodeType === 9 ) { + return { + width: elem.width(), + height: elem.height(), + offset: { top: 0, left: 0 } + }; + } + if ( $.isWindow( raw ) ) { + return { + width: elem.width(), + height: elem.height(), + offset: { top: elem.scrollTop(), left: elem.scrollLeft() } + }; + } + if ( raw.preventDefault ) { + return { + width: 0, + height: 0, + offset: { top: raw.pageY, left: raw.pageX } + }; + } + return { + width: elem.outerWidth(), + height: elem.outerHeight(), + offset: elem.offset() + }; +} + +$.position = { + scrollbarWidth: function() { + if ( cachedScrollbarWidth !== undefined ) { + return cachedScrollbarWidth; + } + var w1, w2, + div = $( "
" + + "
" ), + innerDiv = div.children()[ 0 ]; + + $( "body" ).append( div ); + w1 = innerDiv.offsetWidth; + div.css( "overflow", "scroll" ); + + w2 = innerDiv.offsetWidth; + + if ( w1 === w2 ) { + w2 = div[ 0 ].clientWidth; + } + + div.remove(); + + return ( cachedScrollbarWidth = w1 - w2 ); + }, + getScrollInfo: function( within ) { + var overflowX = within.isWindow || within.isDocument ? "" : + within.element.css( "overflow-x" ), + overflowY = within.isWindow || within.isDocument ? "" : + within.element.css( "overflow-y" ), + hasOverflowX = overflowX === "scroll" || + ( overflowX === "auto" && within.width < within.element[ 0 ].scrollWidth ), + hasOverflowY = overflowY === "scroll" || + ( overflowY === "auto" && within.height < within.element[ 0 ].scrollHeight ); + return { + width: hasOverflowY ? $.position.scrollbarWidth() : 0, + height: hasOverflowX ? $.position.scrollbarWidth() : 0 + }; + }, + getWithinInfo: function( element ) { + var withinElement = $( element || window ), + isWindow = $.isWindow( withinElement[ 0 ] ), + isDocument = !!withinElement[ 0 ] && withinElement[ 0 ].nodeType === 9, + hasOffset = !isWindow && !isDocument; + return { + element: withinElement, + isWindow: isWindow, + isDocument: isDocument, + offset: hasOffset ? $( element ).offset() : { left: 0, top: 0 }, + scrollLeft: withinElement.scrollLeft(), + scrollTop: withinElement.scrollTop(), + width: withinElement.outerWidth(), + height: withinElement.outerHeight() + }; + } +}; + +$.fn.position = function( options ) { + if ( !options || !options.of ) { + return _position.apply( this, arguments ); + } + + // Make a copy, we don't want to modify arguments + options = $.extend( {}, options ); + + var atOffset, targetWidth, targetHeight, targetOffset, basePosition, dimensions, + target = $( options.of ), + within = $.position.getWithinInfo( options.within ), + scrollInfo = $.position.getScrollInfo( within ), + collision = ( options.collision || "flip" ).split( " " ), + offsets = {}; + + dimensions = getDimensions( target ); + if ( target[ 0 ].preventDefault ) { + + // Force left top to allow flipping + options.at = "left top"; + } + targetWidth = dimensions.width; + targetHeight = dimensions.height; + targetOffset = dimensions.offset; + + // Clone to reuse original targetOffset later + basePosition = $.extend( {}, targetOffset ); + + // Force my and at to have valid horizontal and vertical positions + // if a value is missing or invalid, it will be converted to center + $.each( [ "my", "at" ], function() { + var pos = ( options[ this ] || "" ).split( " " ), + horizontalOffset, + verticalOffset; + + if ( pos.length === 1 ) { + pos = rhorizontal.test( pos[ 0 ] ) ? + pos.concat( [ "center" ] ) : + rvertical.test( pos[ 0 ] ) ? + [ "center" ].concat( pos ) : + [ "center", "center" ]; + } + pos[ 0 ] = rhorizontal.test( pos[ 0 ] ) ? pos[ 0 ] : "center"; + pos[ 1 ] = rvertical.test( pos[ 1 ] ) ? pos[ 1 ] : "center"; + + // Calculate offsets + horizontalOffset = roffset.exec( pos[ 0 ] ); + verticalOffset = roffset.exec( pos[ 1 ] ); + offsets[ this ] = [ + horizontalOffset ? horizontalOffset[ 0 ] : 0, + verticalOffset ? 
verticalOffset[ 0 ] : 0 + ]; + + // Reduce to just the positions without the offsets + options[ this ] = [ + rposition.exec( pos[ 0 ] )[ 0 ], + rposition.exec( pos[ 1 ] )[ 0 ] + ]; + } ); + + // Normalize collision option + if ( collision.length === 1 ) { + collision[ 1 ] = collision[ 0 ]; + } + + if ( options.at[ 0 ] === "right" ) { + basePosition.left += targetWidth; + } else if ( options.at[ 0 ] === "center" ) { + basePosition.left += targetWidth / 2; + } + + if ( options.at[ 1 ] === "bottom" ) { + basePosition.top += targetHeight; + } else if ( options.at[ 1 ] === "center" ) { + basePosition.top += targetHeight / 2; + } + + atOffset = getOffsets( offsets.at, targetWidth, targetHeight ); + basePosition.left += atOffset[ 0 ]; + basePosition.top += atOffset[ 1 ]; + + return this.each( function() { + var collisionPosition, using, + elem = $( this ), + elemWidth = elem.outerWidth(), + elemHeight = elem.outerHeight(), + marginLeft = parseCss( this, "marginLeft" ), + marginTop = parseCss( this, "marginTop" ), + collisionWidth = elemWidth + marginLeft + parseCss( this, "marginRight" ) + + scrollInfo.width, + collisionHeight = elemHeight + marginTop + parseCss( this, "marginBottom" ) + + scrollInfo.height, + position = $.extend( {}, basePosition ), + myOffset = getOffsets( offsets.my, elem.outerWidth(), elem.outerHeight() ); + + if ( options.my[ 0 ] === "right" ) { + position.left -= elemWidth; + } else if ( options.my[ 0 ] === "center" ) { + position.left -= elemWidth / 2; + } + + if ( options.my[ 1 ] === "bottom" ) { + position.top -= elemHeight; + } else if ( options.my[ 1 ] === "center" ) { + position.top -= elemHeight / 2; + } + + position.left += myOffset[ 0 ]; + position.top += myOffset[ 1 ]; + + // If the browser doesn't support fractions, then round for consistent results + if ( !supportsOffsetFractions() ) { + position.left = round( position.left ); + position.top = round( position.top ); + } + + collisionPosition = { + marginLeft: marginLeft, + marginTop: marginTop + }; + + $.each( [ "left", "top" ], function( i, dir ) { + if ( $.ui.position[ collision[ i ] ] ) { + $.ui.position[ collision[ i ] ][ dir ]( position, { + targetWidth: targetWidth, + targetHeight: targetHeight, + elemWidth: elemWidth, + elemHeight: elemHeight, + collisionPosition: collisionPosition, + collisionWidth: collisionWidth, + collisionHeight: collisionHeight, + offset: [ atOffset[ 0 ] + myOffset[ 0 ], atOffset [ 1 ] + myOffset[ 1 ] ], + my: options.my, + at: options.at, + within: within, + elem: elem + } ); + } + } ); + + if ( options.using ) { + + // Adds feedback as second argument to using callback, if present + using = function( props ) { + var left = targetOffset.left - position.left, + right = left + targetWidth - elemWidth, + top = targetOffset.top - position.top, + bottom = top + targetHeight - elemHeight, + feedback = { + target: { + element: target, + left: targetOffset.left, + top: targetOffset.top, + width: targetWidth, + height: targetHeight + }, + element: { + element: elem, + left: position.left, + top: position.top, + width: elemWidth, + height: elemHeight + }, + horizontal: right < 0 ? "left" : left > 0 ? "right" : "center", + vertical: bottom < 0 ? "top" : top > 0 ? 
"bottom" : "middle" + }; + if ( targetWidth < elemWidth && abs( left + right ) < targetWidth ) { + feedback.horizontal = "center"; + } + if ( targetHeight < elemHeight && abs( top + bottom ) < targetHeight ) { + feedback.vertical = "middle"; + } + if ( max( abs( left ), abs( right ) ) > max( abs( top ), abs( bottom ) ) ) { + feedback.important = "horizontal"; + } else { + feedback.important = "vertical"; + } + options.using.call( this, props, feedback ); + }; + } + + elem.offset( $.extend( position, { using: using } ) ); + } ); +}; + +$.ui.position = { + fit: { + left: function( position, data ) { + var within = data.within, + withinOffset = within.isWindow ? within.scrollLeft : within.offset.left, + outerWidth = within.width, + collisionPosLeft = position.left - data.collisionPosition.marginLeft, + overLeft = withinOffset - collisionPosLeft, + overRight = collisionPosLeft + data.collisionWidth - outerWidth - withinOffset, + newOverRight; + + // Element is wider than within + if ( data.collisionWidth > outerWidth ) { + + // Element is initially over the left side of within + if ( overLeft > 0 && overRight <= 0 ) { + newOverRight = position.left + overLeft + data.collisionWidth - outerWidth - + withinOffset; + position.left += overLeft - newOverRight; + + // Element is initially over right side of within + } else if ( overRight > 0 && overLeft <= 0 ) { + position.left = withinOffset; + + // Element is initially over both left and right sides of within + } else { + if ( overLeft > overRight ) { + position.left = withinOffset + outerWidth - data.collisionWidth; + } else { + position.left = withinOffset; + } + } + + // Too far left -> align with left edge + } else if ( overLeft > 0 ) { + position.left += overLeft; + + // Too far right -> align with right edge + } else if ( overRight > 0 ) { + position.left -= overRight; + + // Adjust based on position and margin + } else { + position.left = max( position.left - collisionPosLeft, position.left ); + } + }, + top: function( position, data ) { + var within = data.within, + withinOffset = within.isWindow ? 
within.scrollTop : within.offset.top, + outerHeight = data.within.height, + collisionPosTop = position.top - data.collisionPosition.marginTop, + overTop = withinOffset - collisionPosTop, + overBottom = collisionPosTop + data.collisionHeight - outerHeight - withinOffset, + newOverBottom; + + // Element is taller than within + if ( data.collisionHeight > outerHeight ) { + + // Element is initially over the top of within + if ( overTop > 0 && overBottom <= 0 ) { + newOverBottom = position.top + overTop + data.collisionHeight - outerHeight - + withinOffset; + position.top += overTop - newOverBottom; + + // Element is initially over bottom of within + } else if ( overBottom > 0 && overTop <= 0 ) { + position.top = withinOffset; + + // Element is initially over both top and bottom of within + } else { + if ( overTop > overBottom ) { + position.top = withinOffset + outerHeight - data.collisionHeight; + } else { + position.top = withinOffset; + } + } + + // Too far up -> align with top + } else if ( overTop > 0 ) { + position.top += overTop; + + // Too far down -> align with bottom edge + } else if ( overBottom > 0 ) { + position.top -= overBottom; + + // Adjust based on position and margin + } else { + position.top = max( position.top - collisionPosTop, position.top ); + } + } + }, + flip: { + left: function( position, data ) { + var within = data.within, + withinOffset = within.offset.left + within.scrollLeft, + outerWidth = within.width, + offsetLeft = within.isWindow ? within.scrollLeft : within.offset.left, + collisionPosLeft = position.left - data.collisionPosition.marginLeft, + overLeft = collisionPosLeft - offsetLeft, + overRight = collisionPosLeft + data.collisionWidth - outerWidth - offsetLeft, + myOffset = data.my[ 0 ] === "left" ? + -data.elemWidth : + data.my[ 0 ] === "right" ? + data.elemWidth : + 0, + atOffset = data.at[ 0 ] === "left" ? + data.targetWidth : + data.at[ 0 ] === "right" ? + -data.targetWidth : + 0, + offset = -2 * data.offset[ 0 ], + newOverRight, + newOverLeft; + + if ( overLeft < 0 ) { + newOverRight = position.left + myOffset + atOffset + offset + data.collisionWidth - + outerWidth - withinOffset; + if ( newOverRight < 0 || newOverRight < abs( overLeft ) ) { + position.left += myOffset + atOffset + offset; + } + } else if ( overRight > 0 ) { + newOverLeft = position.left - data.collisionPosition.marginLeft + myOffset + + atOffset + offset - offsetLeft; + if ( newOverLeft > 0 || abs( newOverLeft ) < overRight ) { + position.left += myOffset + atOffset + offset; + } + } + }, + top: function( position, data ) { + var within = data.within, + withinOffset = within.offset.top + within.scrollTop, + outerHeight = within.height, + offsetTop = within.isWindow ? within.scrollTop : within.offset.top, + collisionPosTop = position.top - data.collisionPosition.marginTop, + overTop = collisionPosTop - offsetTop, + overBottom = collisionPosTop + data.collisionHeight - outerHeight - offsetTop, + top = data.my[ 1 ] === "top", + myOffset = top ? + -data.elemHeight : + data.my[ 1 ] === "bottom" ? + data.elemHeight : + 0, + atOffset = data.at[ 1 ] === "top" ? + data.targetHeight : + data.at[ 1 ] === "bottom" ? 
+ -data.targetHeight : + 0, + offset = -2 * data.offset[ 1 ], + newOverTop, + newOverBottom; + if ( overTop < 0 ) { + newOverBottom = position.top + myOffset + atOffset + offset + data.collisionHeight - + outerHeight - withinOffset; + if ( newOverBottom < 0 || newOverBottom < abs( overTop ) ) { + position.top += myOffset + atOffset + offset; + } + } else if ( overBottom > 0 ) { + newOverTop = position.top - data.collisionPosition.marginTop + myOffset + atOffset + + offset - offsetTop; + if ( newOverTop > 0 || abs( newOverTop ) < overBottom ) { + position.top += myOffset + atOffset + offset; + } + } + } + }, + flipfit: { + left: function() { + $.ui.position.flip.left.apply( this, arguments ); + $.ui.position.fit.left.apply( this, arguments ); + }, + top: function() { + $.ui.position.flip.top.apply( this, arguments ); + $.ui.position.fit.top.apply( this, arguments ); + } + } +}; + +} )(); + +var position = $.ui.position; + + +/*! + * jQuery UI :data 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ + +//>>label: :data Selector +//>>group: Core +//>>description: Selects elements which have data stored under the specified key. +//>>docs: http://api.jqueryui.com/data-selector/ + + +var data = $.extend( $.expr[ ":" ], { + data: $.expr.createPseudo ? + $.expr.createPseudo( function( dataName ) { + return function( elem ) { + return !!$.data( elem, dataName ); + }; + } ) : + + // Support: jQuery <1.8 + function( elem, i, match ) { + return !!$.data( elem, match[ 3 ] ); + } +} ); + +/*! + * jQuery UI Disable Selection 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ + +//>>label: disableSelection +//>>group: Core +//>>description: Disable selection of text content within the set of matched elements. +//>>docs: http://api.jqueryui.com/disableSelection/ + +// This file is deprecated + + +var disableSelection = $.fn.extend( { + disableSelection: ( function() { + var eventType = "onselectstart" in document.createElement( "div" ) ? + "selectstart" : + "mousedown"; + + return function() { + return this.on( eventType + ".ui-disableSelection", function( event ) { + event.preventDefault(); + } ); + }; + } )(), + + enableSelection: function() { + return this.off( ".ui-disableSelection" ); + } +} ); + + +/*! + * jQuery UI Focusable 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ + +//>>label: :focusable Selector +//>>group: Core +//>>description: Selects elements which can be focused. +//>>docs: http://api.jqueryui.com/focusable-selector/ + + + +// Selectors +$.ui.focusable = function( element, hasTabindex ) { + var map, mapName, img, focusableIfVisible, fieldset, + nodeName = element.nodeName.toLowerCase(); + + if ( "area" === nodeName ) { + map = element.parentNode; + mapName = map.name; + if ( !element.href || !mapName || map.nodeName.toLowerCase() !== "map" ) { + return false; + } + img = $( "img[usemap='#" + mapName + "']" ); + return img.length > 0 && img.is( ":visible" ); + } + + if ( /^(input|select|textarea|button|object)$/.test( nodeName ) ) { + focusableIfVisible = !element.disabled; + + if ( focusableIfVisible ) { + + // Form controls within a disabled fieldset are disabled. + // However, controls within the fieldset's legend do not get disabled. 
+ // Since controls generally aren't placed inside legends, we skip + // this portion of the check. + fieldset = $( element ).closest( "fieldset" )[ 0 ]; + if ( fieldset ) { + focusableIfVisible = !fieldset.disabled; + } + } + } else if ( "a" === nodeName ) { + focusableIfVisible = element.href || hasTabindex; + } else { + focusableIfVisible = hasTabindex; + } + + return focusableIfVisible && $( element ).is( ":visible" ) && visible( $( element ) ); +}; + +// Support: IE 8 only +// IE 8 doesn't resolve inherit to visible/hidden for computed values +function visible( element ) { + var visibility = element.css( "visibility" ); + while ( visibility === "inherit" ) { + element = element.parent(); + visibility = element.css( "visibility" ); + } + return visibility !== "hidden"; +} + +$.extend( $.expr[ ":" ], { + focusable: function( element ) { + return $.ui.focusable( element, $.attr( element, "tabindex" ) != null ); + } +} ); + +var focusable = $.ui.focusable; + + + + +// Support: IE8 Only +// IE8 does not support the form attribute and when it is supplied. It overwrites the form prop +// with a string, so we need to find the proper form. +var form = $.fn.form = function() { + return typeof this[ 0 ].form === "string" ? this.closest( "form" ) : $( this[ 0 ].form ); +}; + + +/*! + * jQuery UI Form Reset Mixin 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ + +//>>label: Form Reset Mixin +//>>group: Core +//>>description: Refresh input widgets when their form is reset +//>>docs: http://api.jqueryui.com/form-reset-mixin/ + + + +var formResetMixin = $.ui.formResetMixin = { + _formResetHandler: function() { + var form = $( this ); + + // Wait for the form reset to actually happen before refreshing + setTimeout( function() { + var instances = form.data( "ui-form-reset-instances" ); + $.each( instances, function() { + this.refresh(); + } ); + } ); + }, + + _bindFormResetHandler: function() { + this.form = this.element.form(); + if ( !this.form.length ) { + return; + } + + var instances = this.form.data( "ui-form-reset-instances" ) || []; + if ( !instances.length ) { + + // We don't use _on() here because we use a single event handler per form + this.form.on( "reset.ui-form-reset", this._formResetHandler ); + } + instances.push( this ); + this.form.data( "ui-form-reset-instances", instances ); + }, + + _unbindFormResetHandler: function() { + if ( !this.form.length ) { + return; + } + + var instances = this.form.data( "ui-form-reset-instances" ); + instances.splice( $.inArray( this, instances ), 1 ); + if ( instances.length ) { + this.form.data( "ui-form-reset-instances", instances ); + } else { + this.form + .removeData( "ui-form-reset-instances" ) + .off( "reset.ui-form-reset" ); + } + } +}; + + +/*! + * jQuery UI Support for jQuery core 1.7.x 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + * + */ + +//>>label: jQuery 1.7 Support +//>>group: Core +//>>description: Support version 1.7.x of jQuery core + + + +// Support: jQuery 1.7 only +// Not a great way to check versions, but since we only support 1.7+ and only +// need to detect <1.8, this is a simple check that should suffice. Checking +// for "1.7." would be a bit safer, but the version string is 1.7, not 1.7.0 +// and we'll never reach 1.70.0 (if we do, we certainly won't be supporting +// 1.7 anymore). 
See #11197 for why we're not using feature detection. +if ( $.fn.jquery.substring( 0, 3 ) === "1.7" ) { + + // Setters for .innerWidth(), .innerHeight(), .outerWidth(), .outerHeight() + // Unlike jQuery Core 1.8+, these only support numeric values to set the + // dimensions in pixels + $.each( [ "Width", "Height" ], function( i, name ) { + var side = name === "Width" ? [ "Left", "Right" ] : [ "Top", "Bottom" ], + type = name.toLowerCase(), + orig = { + innerWidth: $.fn.innerWidth, + innerHeight: $.fn.innerHeight, + outerWidth: $.fn.outerWidth, + outerHeight: $.fn.outerHeight + }; + + function reduce( elem, size, border, margin ) { + $.each( side, function() { + size -= parseFloat( $.css( elem, "padding" + this ) ) || 0; + if ( border ) { + size -= parseFloat( $.css( elem, "border" + this + "Width" ) ) || 0; + } + if ( margin ) { + size -= parseFloat( $.css( elem, "margin" + this ) ) || 0; + } + } ); + return size; + } + + $.fn[ "inner" + name ] = function( size ) { + if ( size === undefined ) { + return orig[ "inner" + name ].call( this ); + } + + return this.each( function() { + $( this ).css( type, reduce( this, size ) + "px" ); + } ); + }; + + $.fn[ "outer" + name ] = function( size, margin ) { + if ( typeof size !== "number" ) { + return orig[ "outer" + name ].call( this, size ); + } + + return this.each( function() { + $( this ).css( type, reduce( this, size, true, margin ) + "px" ); + } ); + }; + } ); + + $.fn.addBack = function( selector ) { + return this.add( selector == null ? + this.prevObject : this.prevObject.filter( selector ) + ); + }; +} + +; +/*! + * jQuery UI Keycode 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ + +//>>label: Keycode +//>>group: Core +//>>description: Provide keycodes as keynames +//>>docs: http://api.jqueryui.com/jQuery.ui.keyCode/ + + +var keycode = $.ui.keyCode = { + BACKSPACE: 8, + COMMA: 188, + DELETE: 46, + DOWN: 40, + END: 35, + ENTER: 13, + ESCAPE: 27, + HOME: 36, + LEFT: 37, + PAGE_DOWN: 34, + PAGE_UP: 33, + PERIOD: 190, + RIGHT: 39, + SPACE: 32, + TAB: 9, + UP: 38 +}; + + + + +// Internal use only +var escapeSelector = $.ui.escapeSelector = ( function() { + var selectorEscape = /([!"#$%&'()*+,./:;<=>?@[\]^`{|}~])/g; + return function( selector ) { + return selector.replace( selectorEscape, "\\$1" ); + }; +} )(); + + +/*! + * jQuery UI Labels 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ + +//>>label: labels +//>>group: Core +//>>description: Find all the labels associated with a given input +//>>docs: http://api.jqueryui.com/labels/ + + + +var labels = $.fn.labels = function() { + var ancestor, selector, id, labels, ancestors; + + // Check control.labels first + if ( this[ 0 ].labels && this[ 0 ].labels.length ) { + return this.pushStack( this[ 0 ].labels ); + } + + // Support: IE <= 11, FF <= 37, Android <= 2.3 only + // Above browsers do not support control.labels. Everything below is to support them + // as well as document fragments. 
control.labels does not work on document fragments + labels = this.eq( 0 ).parents( "label" ); + + // Look for the label based on the id + id = this.attr( "id" ); + if ( id ) { + + // We don't search against the document in case the element + // is disconnected from the DOM + ancestor = this.eq( 0 ).parents().last(); + + // Get a full set of top level ancestors + ancestors = ancestor.add( ancestor.length ? ancestor.siblings() : this.siblings() ); + + // Create a selector for the label based on the id + selector = "label[for='" + $.ui.escapeSelector( id ) + "']"; + + labels = labels.add( ancestors.find( selector ).addBack( selector ) ); + + } + + // Return whatever we have found for labels + return this.pushStack( labels ); +}; + + +/*! + * jQuery UI Scroll Parent 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ + +//>>label: scrollParent +//>>group: Core +//>>description: Get the closest ancestor element that is scrollable. +//>>docs: http://api.jqueryui.com/scrollParent/ + + + +var scrollParent = $.fn.scrollParent = function( includeHidden ) { + var position = this.css( "position" ), + excludeStaticParent = position === "absolute", + overflowRegex = includeHidden ? /(auto|scroll|hidden)/ : /(auto|scroll)/, + scrollParent = this.parents().filter( function() { + var parent = $( this ); + if ( excludeStaticParent && parent.css( "position" ) === "static" ) { + return false; + } + return overflowRegex.test( parent.css( "overflow" ) + parent.css( "overflow-y" ) + + parent.css( "overflow-x" ) ); + } ).eq( 0 ); + + return position === "fixed" || !scrollParent.length ? + $( this[ 0 ].ownerDocument || document ) : + scrollParent; +}; + + +/*! + * jQuery UI Tabbable 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ + +//>>label: :tabbable Selector +//>>group: Core +//>>description: Selects elements which can be tabbed to. +//>>docs: http://api.jqueryui.com/tabbable-selector/ + + + +var tabbable = $.extend( $.expr[ ":" ], { + tabbable: function( element ) { + var tabIndex = $.attr( element, "tabindex" ), + hasTabindex = tabIndex != null; + return ( !hasTabindex || tabIndex >= 0 ) && $.ui.focusable( element, hasTabindex ); + } +} ); + + +/*! + * jQuery UI Unique ID 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ + +//>>label: uniqueId +//>>group: Core +//>>description: Functions to generate and remove uniqueId's +//>>docs: http://api.jqueryui.com/uniqueId/ + + + +var uniqueId = $.fn.extend( { + uniqueId: ( function() { + var uuid = 0; + + return function() { + return this.each( function() { + if ( !this.id ) { + this.id = "ui-id-" + ( ++uuid ); + } + } ); + }; + } )(), + + removeUniqueId: function() { + return this.each( function() { + if ( /^ui-id-\d+$/.test( this.id ) ) { + $( this ).removeAttr( "id" ); + } + } ); + } +} ); + + + + +// This file is deprecated +var ie = $.ui.ie = !!/msie [\w.]+/.exec( navigator.userAgent.toLowerCase() ); + +/*! + * jQuery UI Mouse 1.12.0 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ + +//>>label: Mouse +//>>group: Widgets +//>>description: Abstracts mouse-based interactions to assist in creating certain widgets. 
+//>>docs: http://api.jqueryui.com/mouse/ + + + +var mouseHandled = false; +$( document ).on( "mouseup", function() { + mouseHandled = false; +} ); + +var widgetsMouse = $.widget( "ui.mouse", { + version: "1.12.0", + options: { + cancel: "input, textarea, button, select, option", + distance: 1, + delay: 0 + }, + _mouseInit: function() { + var that = this; + + this.element + .on( "mousedown." + this.widgetName, function( event ) { + return that._mouseDown( event ); + } ) + .on( "click." + this.widgetName, function( event ) { + if ( true === $.data( event.target, that.widgetName + ".preventClickEvent" ) ) { + $.removeData( event.target, that.widgetName + ".preventClickEvent" ); + event.stopImmediatePropagation(); + return false; + } + } ); + + this.started = false; + }, + + // TODO: make sure destroying one instance of mouse doesn't mess with + // other instances of mouse + _mouseDestroy: function() { + this.element.off( "." + this.widgetName ); + if ( this._mouseMoveDelegate ) { + this.document + .off( "mousemove." + this.widgetName, this._mouseMoveDelegate ) + .off( "mouseup." + this.widgetName, this._mouseUpDelegate ); + } + }, + + _mouseDown: function( event ) { + + // don't let more than one widget handle mouseStart + if ( mouseHandled ) { + return; + } + + this._mouseMoved = false; + + // We may have missed mouseup (out of window) + ( this._mouseStarted && this._mouseUp( event ) ); + + this._mouseDownEvent = event; + + var that = this, + btnIsLeft = ( event.which === 1 ), + + // event.target.nodeName works around a bug in IE 8 with + // disabled inputs (#7620) + elIsCancel = ( typeof this.options.cancel === "string" && event.target.nodeName ? + $( event.target ).closest( this.options.cancel ).length : false ); + if ( !btnIsLeft || elIsCancel || !this._mouseCapture( event ) ) { + return true; + } + + this.mouseDelayMet = !this.options.delay; + if ( !this.mouseDelayMet ) { + this._mouseDelayTimer = setTimeout( function() { + that.mouseDelayMet = true; + }, this.options.delay ); + } + + if ( this._mouseDistanceMet( event ) && this._mouseDelayMet( event ) ) { + this._mouseStarted = ( this._mouseStart( event ) !== false ); + if ( !this._mouseStarted ) { + event.preventDefault(); + return true; + } + } + + // Click event may never have fired (Gecko & Opera) + if ( true === $.data( event.target, this.widgetName + ".preventClickEvent" ) ) { + $.removeData( event.target, this.widgetName + ".preventClickEvent" ); + } + + // These delegates are required to keep context + this._mouseMoveDelegate = function( event ) { + return that._mouseMove( event ); + }; + this._mouseUpDelegate = function( event ) { + return that._mouseUp( event ); + }; + + this.document + .on( "mousemove." + this.widgetName, this._mouseMoveDelegate ) + .on( "mouseup." + this.widgetName, this._mouseUpDelegate ); + + event.preventDefault(); + + mouseHandled = true; + return true; + }, + + _mouseMove: function( event ) { + + // Only check for mouseups outside the document if you've moved inside the document + // at least once. This prevents the firing of mouseup in the case of IE<9, which will + // fire a mousemove event if content is placed under the cursor. 
See #7778 + // Support: IE <9 + if ( this._mouseMoved ) { + + // IE mouseup check - mouseup happened when mouse was out of window + if ( $.ui.ie && ( !document.documentMode || document.documentMode < 9 ) && + !event.button ) { + return this._mouseUp( event ); + + // Iframe mouseup check - mouseup occurred in another document + } else if ( !event.which ) { + + // Support: Safari <=8 - 9 + // Safari sets which to 0 if you press any of the following keys + // during a drag (#14461) + if ( event.originalEvent.altKey || event.originalEvent.ctrlKey || + event.originalEvent.metaKey || event.originalEvent.shiftKey ) { + this.ignoreMissingWhich = true; + } else if ( !this.ignoreMissingWhich ) { + return this._mouseUp( event ); + } + } + } + + if ( event.which || event.button ) { + this._mouseMoved = true; + } + + if ( this._mouseStarted ) { + this._mouseDrag( event ); + return event.preventDefault(); + } + + if ( this._mouseDistanceMet( event ) && this._mouseDelayMet( event ) ) { + this._mouseStarted = + ( this._mouseStart( this._mouseDownEvent, event ) !== false ); + ( this._mouseStarted ? this._mouseDrag( event ) : this._mouseUp( event ) ); + } + + return !this._mouseStarted; + }, + + _mouseUp: function( event ) { + this.document + .off( "mousemove." + this.widgetName, this._mouseMoveDelegate ) + .off( "mouseup." + this.widgetName, this._mouseUpDelegate ); + + if ( this._mouseStarted ) { + this._mouseStarted = false; + + if ( event.target === this._mouseDownEvent.target ) { + $.data( event.target, this.widgetName + ".preventClickEvent", true ); + } + + this._mouseStop( event ); + } + + if ( this._mouseDelayTimer ) { + clearTimeout( this._mouseDelayTimer ); + delete this._mouseDelayTimer; + } + + this.ignoreMissingWhich = false; + mouseHandled = false; + event.preventDefault(); + }, + + _mouseDistanceMet: function( event ) { + return ( Math.max( + Math.abs( this._mouseDownEvent.pageX - event.pageX ), + Math.abs( this._mouseDownEvent.pageY - event.pageY ) + ) >= this.options.distance + ); + }, + + _mouseDelayMet: function( /* event */ ) { + return this.mouseDelayMet; + }, + + // These are placeholder methods, to be overriden by extending plugin + _mouseStart: function( /* event */ ) {}, + _mouseDrag: function( /* event */ ) {}, + _mouseStop: function( /* event */ ) {}, + _mouseCapture: function( /* event */ ) { return true; } +} ); + + + + +// $.ui.plugin is deprecated. Use $.widget() extensions instead. +var plugin = $.ui.plugin = { + add: function( module, option, set ) { + var i, + proto = $.ui[ module ].prototype; + for ( i in set ) { + proto.plugins[ i ] = proto.plugins[ i ] || []; + proto.plugins[ i ].push( [ option, set[ i ] ] ); + } + }, + call: function( instance, name, args, allowDisconnected ) { + var i, + set = instance.plugins[ name ]; + + if ( !set ) { + return; + } + + if ( !allowDisconnected && ( !instance.element[ 0 ].parentNode || + instance.element[ 0 ].parentNode.nodeType === 11 ) ) { + return; + } + + for ( i = 0; i < set.length; i++ ) { + if ( instance.options[ set[ i ][ 0 ] ] ) { + set[ i ][ 1 ].apply( instance.element, args ); + } + } + } +}; + + + +var safeActiveElement = $.ui.safeActiveElement = function( document ) { + var activeElement; + + // Support: IE 9 only + // IE9 throws an "Unspecified error" accessing document.activeElement from an ' + ).bind('load', function () { + var fileInputClones, + paramNames = $.isArray(options.paramName) ? 
+ options.paramName : [options.paramName]; + iframe + .unbind('load') + .bind('load', function () { + var response; + // Wrap in a try/catch block to catch exceptions thrown + // when trying to access cross-domain iframe contents: + try { + response = iframe.contents(); + // Google Chrome and Firefox do not throw an + // exception when calling iframe.contents() on + // cross-domain requests, so we unify the response: + if (!response.length || !response[0].firstChild) { + throw new Error(); + } + } catch (e) { + response = undefined; + } + // The complete callback returns the + // iframe content document as response object: + completeCallback( + 200, + 'success', + {'iframe': response} + ); + // Fix for IE endless progress bar activity bug + // (happens on form submits to iframe targets): + $('') + .appendTo(form); + window.setTimeout(function () { + // Removing the form in a setTimeout call + // allows Chrome's developer tools to display + // the response result + form.remove(); + }, 0); + }); + form + .prop('target', iframe.prop('name')) + .prop('action', options.url) + .prop('method', options.type); + if (options.formData) { + $.each(options.formData, function (index, field) { + $('') + .prop('name', field.name) + .val(field.value) + .appendTo(form); + }); + } + if (options.fileInput && options.fileInput.length && + options.type === 'POST') { + fileInputClones = options.fileInput.clone(); + // Insert a clone for each file input field: + options.fileInput.after(function (index) { + return fileInputClones[index]; + }); + if (options.paramName) { + options.fileInput.each(function (index) { + $(this).prop( + 'name', + paramNames[index] || options.paramName + ); + }); + } + // Appending the file input fields to the hidden form + // removes them from their original location: + form + .append(options.fileInput) + .prop('enctype', 'multipart/form-data') + // enctype must be set as encoding for IE: + .prop('encoding', 'multipart/form-data'); + // Remove the HTML5 form attribute from the input(s): + options.fileInput.removeAttr('form'); + } + form.submit(); + // Insert the file input fields at their original location + // by replacing the clones with the originals: + if (fileInputClones && fileInputClones.length) { + options.fileInput.each(function (index, input) { + var clone = $(fileInputClones[index]); + // Restore the original name and form properties: + $(input) + .prop('name', clone.prop('name')) + .attr('form', clone.attr('form')); + clone.replaceWith(input); + }); + } + }); + form.append(iframe).appendTo(document.body); + }, + abort: function () { + if (iframe) { + // javascript:false as iframe src aborts the request + // and prevents warning popups on HTTPS in IE6. + // concat is used to avoid the "Script URL" JSLint error: + iframe + .unbind('load') + .prop('src', initialIframeSrc); + } + if (form) { + form.remove(); + } + } + }; + } + }); + + // The iframe transport returns the iframe content document as response. + // The following adds converters from iframe to text, json, html, xml + // and script. + // Please note that the Content-Type for JSON responses has to be text/plain + // or text/html, if the browser doesn't include application/json in the + // Accept header, else IE will show a download dialog. + // The Content-Type for XML responses on the other hand has to be always + // application/xml or text/xml, so IE properly parses the XML response. 
+ // See also + // https://github.com/blueimp/jQuery-File-Upload/wiki/Setup#content-type-negotiation + $.ajaxSetup({ + converters: { + 'iframe text': function (iframe) { + return iframe && $(iframe[0].body).text(); + }, + 'iframe json': function (iframe) { + return iframe && $.parseJSON($(iframe[0].body).text()); + }, + 'iframe html': function (iframe) { + return iframe && $(iframe[0].body).html(); + }, + 'iframe xml': function (iframe) { + var xmlDoc = iframe && iframe[0]; + return xmlDoc && $.isXMLDoc(xmlDoc) ? xmlDoc : + $.parseXML((xmlDoc.XMLDocument && xmlDoc.XMLDocument.xml) || + $(xmlDoc.body).html()); + }, + 'iframe script': function (iframe) { + return iframe && $.globalEval($(iframe[0].body).text()); + } + } + }); + +})); diff --git a/XMLValidatorWeb/Scripts/jquery.multilevelpushmenu.js b/XMLValidatorWeb/Scripts/jquery.multilevelpushmenu.js new file mode 100644 index 0000000..1224173 --- /dev/null +++ b/XMLValidatorWeb/Scripts/jquery.multilevelpushmenu.js @@ -0,0 +1,1140 @@ +/** + * jquery.multilevelpushmenu.js v2.1.4 + * + * Licensed under the MIT license. + * http://www.opensource.org/licenses/mit-license.php + * + * Copyright 2013-2014, Make IT d.o.o. + * http://multi-level-push-menu.make.rs + * https://github.com/adgsm/multi-level-push-menu + */ +(function ( $ ) { + $.fn.multilevelpushmenu = function( options ) { + "use strict"; + var args = arguments, + returnValue = null; + + this.each(function(){ + var instance = this, + $this = $( this ), + $container = ( $this.context != undefined ) ? $this : $( 'body' ), + menu = ( options && options.menu != undefined ) ? options.menu : $this.find( 'nav' ), + clickEventType, dragEventType; + + // Settings + var settings = $.extend({ + container: $container, + containersToPush: null, + menuID: ( ( $container.prop( 'id' ) != undefined && $container.prop( 'id' ) != '' ) ? 
$container.prop( 'id' ) : this.nodeName.toLowerCase() ) + "_multilevelpushmenu", + wrapperClass: 'multilevelpushmenu_wrapper', + menuInactiveClass: 'multilevelpushmenu_inactive', + menu: menu, + menuWidth: 0, + menuHeight: 0, + collapsed: false, + fullCollapse: false, + direction: 'ltr', + backText: 'Back', + backItemClass: 'backItemClass', + backItemIcon: 'fa fa-angle-right', + groupIcon: 'fa fa-angle-left', + mode: 'overlap', + overlapWidth: 40, + preventItemClick: true, + preventGroupItemClick: true, + swipe: 'both', + durationSlideOut: 400, + durationSlideDown: 500, + durationTransition: 400, + onCollapseMenuStart: function() {}, + onCollapseMenuEnd: function() {}, + onExpandMenuStart: function() {}, + onExpandMenuEnd: function() {}, + onGroupItemClick: function() {}, + onItemClick: function() {}, + onTitleItemClick: function() {}, + onBackItemClick: function() {}, + onMenuReady: function() {}, + onMenuSwipe: function() {} + }, options ); + + // Store a settings reference withint the element's data + if (!$.data(instance, 'plugin_multilevelpushmenu')) { + $.data(instance, 'plugin_multilevelpushmenu', settings); + instance.settings = $.data(instance, 'plugin_multilevelpushmenu'); + } + + // Exposed methods + var methods = { + // Initialize menu + init: function () { + return initialize.apply(this, Array.prototype.slice.call(arguments)); + }, + // Collapse menu + collapse: function () { + return collapseMenu.apply(this, Array.prototype.slice.call(arguments)); + }, + // Expand menu + expand: function () { + return expandMenu.apply(this, Array.prototype.slice.call(arguments)); + }, + // Menu expanded + menuexpanded: function () { + return menuExpanded.apply(this, Array.prototype.slice.call(arguments)); + }, + // Active menu + activemenu: function () { + return activeMenu.apply(this, Array.prototype.slice.call(arguments)); + }, + // Find menu(s) by title + findmenusbytitle: function () { + return findMenusByTitle.apply(this, Array.prototype.slice.call(arguments)); + }, + // Find item(s) by name + finditemsbyname: function () { + return findItemsByName.apply(this, Array.prototype.slice.call(arguments)); + }, + // Find path to root menu collection + pathtoroot: function () { + return pathToRoot.apply(this, Array.prototype.slice.call(arguments)); + }, + // Find shared path to root of two menus + comparepaths: function () { + return comparePaths.apply(this, Array.prototype.slice.call(arguments)); + }, + // Get/Set settings options + option: function () { + return manageOptions.apply(this, Array.prototype.slice.call(arguments)); + }, + // Add item(s) + additems: function () { + return addItems.apply(this, Array.prototype.slice.call(arguments)); + }, + // Remove item(s) + removeitems: function () { + return removeItems.apply(this, Array.prototype.slice.call(arguments)); + }, + // Size DOM elements + redraw: function () { + return sizeDOMelements.apply(this, Array.prototype.slice.call(arguments)); + }, + // Returns visible level holders + visiblemenus: function () { + return visibleLevelHolders.apply(this, Array.prototype.slice.call(arguments)); + }, + // Returns visible level holders + hiddenmenus: function () { + return hiddenLevelHolders.apply(this, Array.prototype.slice.call(arguments)); + }, + // Propagate event to underneath layer + propagateevent: function () { + return propagateEvent.apply(this, Array.prototype.slice.call(arguments)); + } + }; + + // IE 8 and modern browsers, prevent event propagation + function stopEventPropagation( e ){ + if ( e.stopPropagation && e.preventDefault ) { + 
e.stopPropagation(); + e.preventDefault(); + } + else { + e.cancelBubble = true; + e.returnValue = false; + } + } + + // propagate event to underneath layer + // http://jsfiddle.net/E9zTs/2/ + function propagateEvent( $element , event ) { + if( $element == undefined || event == undefined ) return false; + $element.on( event , function ( e , ee ) { + $element.hide(); + try { + if(!e.pageX || !e.pageY) return false; + ee = ee || { + pageX: e.pageX, + pageY: e.pageY + }; + var next = document.elementFromPoint( ee.pageX , ee.pageY ); + next = ( next.nodeType == 3 ) ? next.parentNode : next //Opera + $( next ).trigger( event , ee ); + } + catch ( err ) { + $.error( 'Error while propagating event: ' + err.message ); + } + finally { + $element.show(); + } + }); + } + + // Create DOM structure if it does not already exist within the container (input: array) + function createDOMStructure() { + var $mainWrapper = $( "