diff --git a/.gitignore b/.gitignore
index a5d233be..1a31535f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,34 +1,27 @@
-
 *~
-*.dll
 *.*~
 *.mdb
 *.pdb
 *.pidb
-*.vcproj
 *.user
 *.ncb
 *.aps
 *.suo
-*.sln
 *.userprefs
 *.usertasks
 *.orig
 *.cache
 *.ReSharper
-*/bin/*
-*/obj/*
-*/test-results/*
-/test-results/
-
-/examples/*/bin/*
-/examples/*/test-results/*
-/examples/*/obj/*
+bin/
+obj/
+deploy/
+test-results/
+test-results/
+/Release/*
 /_UpgradeReport_Files/*
 /redist/*.zip
 *_ReSharper.*/**
->>>>>>> dc06fc695339fb920a153bad7a131c47f57ac7d7:.gitignore
diff --git a/MongoDB.GridFS/AssemblyInfo.cs b/AssemblyInfoGlobal.cs
similarity index 50%
rename from MongoDB.GridFS/AssemblyInfo.cs
rename to AssemblyInfoGlobal.cs
index 65dd20a6..d595b24c 100644
--- a/MongoDB.GridFS/AssemblyInfo.cs
+++ b/AssemblyInfoGlobal.cs
@@ -1,26 +1,16 @@
-using System.Reflection;
-using System.Runtime.CompilerServices;
+using System.Reflection;
 // Information about this assembly is defined by the following attributes.
 // Change them to the values specific to your project.
-[assembly: AssemblyTitle("MongoDB.GridFS")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("")]
-[assembly: AssemblyProduct("")]
-[assembly: AssemblyCopyright("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("MongoDB-CSharp")]
+[assembly: AssemblyCopyright("MongoDB-CSharp")]
 [assembly: AssemblyTrademark("")]
 [assembly: AssemblyCulture("")]
 // The assembly version has the format "{Major}.{Minor}.{Build}.{Revision}".
 // The form "{Major}.{Minor}.*" will automatically update the build and revision,
 // and "{Major}.{Minor}.{Build}.*" will update just the revision.
-
-[assembly: AssemblyVersion("1.0.*")]
-
-// The following attributes are used to specify the signing key for the assembly,
-// if desired. See the Mono documentation for more information about signing.
- -[assembly: AssemblyDelaySign(false)] -[assembly: AssemblyKeyFile("")] + +[assembly: AssemblyVersion("0.90.0.1")] diff --git a/Deploy.proj b/Deploy.proj new file mode 100644 index 00000000..133683f8 --- /dev/null +++ b/Deploy.proj @@ -0,0 +1,61 @@ + + + + + + $(MSBuildProjectDirectory)\tools\MSBuild.Community.Tasks + $(MSBuildProjectDirectory)\tools\MSBuild.ExtensionPack + $(MSBuildProjectDirectory)\redist + $(DeployPath)\tmp + $(MSBuildProjectDirectory)\MongoDB-CSharp.sln + + + + + + + + + + + + + + + + + + + $(DeployPath)\MongoDB-CSharp.zip + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/MongoDB-CSharp-2010.sln b/MongoDB-CSharp-2010.sln new file mode 100644 index 00000000..5b360815 --- /dev/null +++ b/MongoDB-CSharp-2010.sln @@ -0,0 +1,116 @@ + +Microsoft Visual Studio Solution File, Format Version 11.00 +# Visual Studio 2010 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Samples", "Samples", "{4345382B-FAA2-46E2-99CF-C90ACA2DD574}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tools", "Tools", "{AEFFB75E-9365-4BC6-87E9-148CEECA0C0C}" +EndProject +Project("{F184B08F-C81C-45F6-A57F-5ABD9991F28F}") = "SimpleVB", "examples\SimpleVB\SimpleVB.vbproj", "{1BAAE3D8-7720-4AA5-9335-E59824E7B667}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Simple", "examples\Simple\Simple.csproj", "{131BDB5F-5C6F-4AC7-B03E-394B1B75E120}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB", "source\MongoDB\MongoDB.csproj", "{B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.GridFS", "source\MongoDB.GridFS\MongoDB.GridFS.csproj", "{B42DBBF9-0A1F-4749-9787-013BF8D8F435}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.GridFS.Tests", "source\MongoDB.GridFS.Tests\MongoDB.GridFS.Tests.csproj", "{0C293FE9-F670-4FEF-A60F-20F8C978B1CD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Tests", "source\MongoDB.Tests\MongoDB.Tests.csproj", "{C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Benchmark", "tools\Benchmark\Benchmark.csproj", "{5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {1BAAE3D8-7720-4AA5-9335-E59824E7B667}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1BAAE3D8-7720-4AA5-9335-E59824E7B667}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1BAAE3D8-7720-4AA5-9335-E59824E7B667}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1BAAE3D8-7720-4AA5-9335-E59824E7B667}.Release|Any CPU.Build.0 = Release|Any CPU + {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Debug|Any CPU.Build.0 = Debug|Any CPU + {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Release|Any CPU.ActiveCfg = Release|Any CPU + {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Release|Any CPU.Build.0 = Release|Any CPU + {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}.Release|Any CPU.Build.0 = Release|Any CPU + {B42DBBF9-0A1F-4749-9787-013BF8D8F435}.Debug|Any CPU.ActiveCfg = 
Debug|Any CPU + {B42DBBF9-0A1F-4749-9787-013BF8D8F435}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B42DBBF9-0A1F-4749-9787-013BF8D8F435}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B42DBBF9-0A1F-4749-9787-013BF8D8F435}.Release|Any CPU.Build.0 = Release|Any CPU + {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Release|Any CPU.Build.0 = Release|Any CPU + {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Release|Any CPU.Build.0 = Release|Any CPU + {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {1BAAE3D8-7720-4AA5-9335-E59824E7B667} = {4345382B-FAA2-46E2-99CF-C90ACA2DD574} + {131BDB5F-5C6F-4AC7-B03E-394B1B75E120} = {4345382B-FAA2-46E2-99CF-C90ACA2DD574} + {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B} = {AEFFB75E-9365-4BC6-87E9-148CEECA0C0C} + EndGlobalSection + GlobalSection(MonoDevelopProperties) = preSolution + StartupItem = MongoDBDriver\MongoDB.Driver.csproj + Policies = $0 + $0.DotNetNamingPolicy = $1 + $1.DirectoryNamespaceAssociation = None + $1.ResourceNamePolicy = FileFormatDefault + $0.TextStylePolicy = $2 + $2.FileWidth = 120 + $2.TabWidth = 4 + $2.inheritsSet = Mono + $2.inheritsScope = text/plain + $2.scope = text/plain + $0.TextStylePolicy = $3 + $3.FileWidth = 120 + $3.NoTabsAfterNonTabs = True + $3.inheritsSet = VisualStudio + $3.inheritsScope = text/plain + $3.scope = text/x-csharp + $0.CSharpFormattingPolicy = $4 + $4.IndentSwitchBody = True + $4.MethodBraceStyle = EndOfLineWithoutSpace + $4.BeforeMethodCallParentheses = False + $4.BeforeMethodDeclarationParentheses = False + $4.BeforeConstructorDeclarationParentheses = False + $4.BeforeDelegateDeclarationParentheses = False + $4.NewParentheses = False + $4.MethodBraceStyle = EndOfLine + $4.ConstructorBraceStyle = EndOfLine + $4.DestructorBraceStyle = EndOfLine + $4.BeforeMethodCallParentheses = False + $4.BeforeMethodDeclarationParentheses = False + $4.BeforeConstructorDeclarationParentheses = False + $4.NewParentheses = False + $4.IfParentheses = False + $4.WhileParentheses = False + $4.ForParentheses = False + $4.ForeachParentheses = False + $4.CatchParentheses = False + $4.SwitchParentheses = False + $4.LockParentheses = False + $4.UsingParentheses = False + $4.inheritsSet = Mono + $4.inheritsScope = text/x-csharp + $4.scope = text/x-csharp + $0.TextStylePolicy = $5 + $5.FileWidth = 120 + $5.EolMarker = Windows + $5.inheritsSet = VisualStudio + $5.inheritsScope = text/plain + $5.scope = text/x-vb + EndGlobalSection +EndGlobal diff --git a/MongoDBDriver.sln b/MongoDB-CSharp.sln similarity index 65% rename from MongoDBDriver.sln rename to MongoDB-CSharp.sln index fc6814ab..007cc71d 100644 --- a/MongoDBDriver.sln +++ b/MongoDB-CSharp.sln 
@@ -1,30 +1,23 @@  Microsoft Visual Studio Solution File, Format Version 10.00 # Visual Studio 2008 -# SharpDevelop 3.1.0.4890 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Driver", "MongoDBDriver\MongoDB.Driver.csproj", "{B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}" -EndProject -Project("{9344bdbb-3e7f-41fc-a0dd-8665d75ee146}") = "Packages", "Packages.mdproj", "{502F3381-58AA-461B-B9D8-12578A588C61}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Driver.Tests", "MongoDB.Net-Tests\MongoDB.Driver.Tests.csproj", "{C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}" +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Samples", "Samples", "{4345382B-FAA2-46E2-99CF-C90ACA2DD574}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Linq", "MongoDB.Linq\MongoDB.Linq.csproj", "{2E48891E-72F9-445D-9A5A-DBA787BFFE9E}" +Project("{F184B08F-C81C-45F6-A57F-5ABD9991F28F}") = "SimpleVB", "examples\SimpleVB\SimpleVB.vbproj", "{1BAAE3D8-7720-4AA5-9335-E59824E7B667}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Linq.Tests", "MongoDB.Linq.Tests\MongoDB.Linq.Tests.csproj", "{870FE8E1-3461-4C79-BF25-9C35E41BF582}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Simple", "examples\Simple\Simple.csproj", "{131BDB5F-5C6F-4AC7-B03E-394B1B75E120}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Driver.Benchmark", "MongoDB.Driver.Benchmark\MongoDB.Driver.Benchmark.csproj", "{5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB", "source\MongoDB\MongoDB.csproj", "{B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.GridFS", "MongoDB.GridFS\MongoDB.GridFS.csproj", "{B42DBBF9-0A1F-4749-9787-013BF8D8F435}" +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tools", "Tools", "{AEFFB75E-9365-4BC6-87E9-148CEECA0C0C}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.GridFS.Tests", "MongoDB.GridFS.Tests\MongoDB.GridFS.Tests.csproj", "{0C293FE9-F670-4FEF-A60F-20F8C978B1CD}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.GridFS", "source\MongoDB.GridFS\MongoDB.GridFS.csproj", "{B42DBBF9-0A1F-4749-9787-013BF8D8F435}" EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Samples", "Samples", "{4345382B-FAA2-46E2-99CF-C90ACA2DD574}" - ProjectSection(SolutionItems) = postProject - EndProjectSection +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.GridFS.Tests", "source\MongoDB.GridFS.Tests\MongoDB.GridFS.Tests.csproj", "{0C293FE9-F670-4FEF-A60F-20F8C978B1CD}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Simple", "examples\Simple\Simple.csproj", "{131BDB5F-5C6F-4AC7-B03E-394B1B75E120}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Tests", "source\MongoDB.Tests\MongoDB.Tests.csproj", "{C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}" EndProject -Project("{F184B08F-C81C-45F6-A57F-5ABD9991F28F}") = "SimpleVB", "examples\SimpleVB\SimpleVB.vbproj", "{1BAAE3D8-7720-4AA5-9335-E59824E7B667}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Benchmark", "tools\Benchmark\Benchmark.csproj", "{5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -32,32 +25,14 @@ Global Release|Any CPU = Release|Any CPU EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution - {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - 
{0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Debug|Any CPU.Build.0 = Debug|Any CPU - {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Release|Any CPU.ActiveCfg = Release|Any CPU - {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Release|Any CPU.Build.0 = Release|Any CPU - {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Debug|Any CPU.Build.0 = Debug|Any CPU - {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Release|Any CPU.ActiveCfg = Release|Any CPU - {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Release|Any CPU.Build.0 = Release|Any CPU {1BAAE3D8-7720-4AA5-9335-E59824E7B667}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {1BAAE3D8-7720-4AA5-9335-E59824E7B667}.Debug|Any CPU.Build.0 = Debug|Any CPU {1BAAE3D8-7720-4AA5-9335-E59824E7B667}.Release|Any CPU.ActiveCfg = Release|Any CPU {1BAAE3D8-7720-4AA5-9335-E59824E7B667}.Release|Any CPU.Build.0 = Release|Any CPU - {2E48891E-72F9-445D-9A5A-DBA787BFFE9E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2E48891E-72F9-445D-9A5A-DBA787BFFE9E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2E48891E-72F9-445D-9A5A-DBA787BFFE9E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2E48891E-72F9-445D-9A5A-DBA787BFFE9E}.Release|Any CPU.Build.0 = Release|Any CPU - {502F3381-58AA-461B-B9D8-12578A588C61}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {502F3381-58AA-461B-B9D8-12578A588C61}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Release|Any CPU.Build.0 = Release|Any CPU - {870FE8E1-3461-4C79-BF25-9C35E41BF582}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {870FE8E1-3461-4C79-BF25-9C35E41BF582}.Debug|Any CPU.Build.0 = Debug|Any CPU - {870FE8E1-3461-4C79-BF25-9C35E41BF582}.Release|Any CPU.ActiveCfg = Release|Any CPU - {870FE8E1-3461-4C79-BF25-9C35E41BF582}.Release|Any CPU.Build.0 = Release|Any CPU + {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Debug|Any CPU.Build.0 = Debug|Any CPU + {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Release|Any CPU.ActiveCfg = Release|Any CPU + {131BDB5F-5C6F-4AC7-B03E-394B1B75E120}.Release|Any CPU.Build.0 = Release|Any CPU {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}.Debug|Any CPU.Build.0 = Debug|Any CPU {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}.Release|Any CPU.ActiveCfg = Release|Any CPU @@ -66,12 +41,26 @@ Global {B42DBBF9-0A1F-4749-9787-013BF8D8F435}.Debug|Any CPU.Build.0 = Debug|Any CPU {B42DBBF9-0A1F-4749-9787-013BF8D8F435}.Release|Any CPU.ActiveCfg = Release|Any CPU {B42DBBF9-0A1F-4749-9787-013BF8D8F435}.Release|Any CPU.Build.0 = Release|Any CPU + {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Release|Any CPU.Build.0 = Release|Any CPU {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Debug|Any CPU.Build.0 = Debug|Any CPU {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Release|Any CPU.ActiveCfg = Release|Any CPU {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Release|Any CPU.Build.0 = Release|Any CPU - 
{502F3381-58AA-461B-B9D8-12578A588C61}.Debug|Any CPU.Build.0 = Debug|Any CPU - {502F3381-58AA-461B-B9D8-12578A588C61}.Release|Any CPU.Build.0 = Release|Any CPU + {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {1BAAE3D8-7720-4AA5-9335-E59824E7B667} = {4345382B-FAA2-46E2-99CF-C90ACA2DD574} + {131BDB5F-5C6F-4AC7-B03E-394B1B75E120} = {4345382B-FAA2-46E2-99CF-C90ACA2DD574} + {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B} = {AEFFB75E-9365-4BC6-87E9-148CEECA0C0C} EndGlobalSection GlobalSection(MonoDevelopProperties) = preSolution StartupItem = MongoDBDriver\MongoDB.Driver.csproj @@ -92,6 +81,28 @@ Global $3.inheritsScope = text/plain $3.scope = text/x-csharp $0.CSharpFormattingPolicy = $4 + $4.IndentSwitchBody = True + $4.MethodBraceStyle = EndOfLineWithoutSpace + $4.BeforeMethodCallParentheses = False + $4.BeforeMethodDeclarationParentheses = False + $4.BeforeConstructorDeclarationParentheses = False + $4.BeforeDelegateDeclarationParentheses = False + $4.NewParentheses = False + $4.MethodBraceStyle = EndOfLine + $4.ConstructorBraceStyle = EndOfLine + $4.DestructorBraceStyle = EndOfLine + $4.BeforeMethodCallParentheses = False + $4.BeforeMethodDeclarationParentheses = False + $4.BeforeConstructorDeclarationParentheses = False + $4.NewParentheses = False + $4.IfParentheses = False + $4.WhileParentheses = False + $4.ForParentheses = False + $4.ForeachParentheses = False + $4.CatchParentheses = False + $4.SwitchParentheses = False + $4.LockParentheses = False + $4.UsingParentheses = False $4.inheritsSet = Mono $4.inheritsScope = text/x-csharp $4.scope = text/x-csharp @@ -102,11 +113,4 @@ Global $5.inheritsScope = text/plain $5.scope = text/x-vb EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection - GlobalSection(NestedProjects) = preSolution - {1BAAE3D8-7720-4AA5-9335-E59824E7B667} = {4345382B-FAA2-46E2-99CF-C90ACA2DD574} - {131BDB5F-5C6F-4AC7-B03E-394B1B75E120} = {4345382B-FAA2-46E2-99CF-C90ACA2DD574} - EndGlobalSection EndGlobal diff --git a/MongoDB.Driver.Benchmark/Main.cs b/MongoDB.Driver.Benchmark/Main.cs deleted file mode 100644 index 18e513a3..00000000 --- a/MongoDB.Driver.Benchmark/Main.cs +++ /dev/null @@ -1,265 +0,0 @@ -using System; -using System.IO; -using System.Threading; - -using MongoDB.Driver; -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver.Benchmark -{ - /// - /// This is the standard 10gen benchmark program. 
- /// - class MainClass - { - static Document small = new Document(); - static Document medium = new Document(); - static Document large = new Document(); - - static int trials = 1; - static int perTrial = 5000; - static int batchSize = 100; - - public static void Main (string[] args) - { - SetupDocuments(); - - Mongo m = new Mongo(); - m.Connect(); - Database db = m["benchmark"]; - - db.MetaData.DropDatabase(); - Console.WriteLine("Starting Tests"); - - RunEncodeTest("encode (small)",small); - RunEncodeTest("encode (medium)", medium); - RunEncodeTest("encode (large)", large); - - RunDecodeTest("decode (small)",small); - RunDecodeTest("decode (medium)", medium); - RunDecodeTest("decode (large)", large); - - db.MetaData.DropDatabase(); - RunInsertTest("insert (small, no index)", db, "small_none",small,false,false); - RunInsertTest("insert (medium, no index)", db, "medium_none",medium,false,false); - RunInsertTest("insert (large, no index)", db, "large_none",large,false,false); - - RunInsertTest("insert (small, indexed)", db, "small_index",small,true,false); - RunInsertTest("insert (medium, indexed)", db, "medium_index",medium,true,false); - RunInsertTest("insert (large, indexed)", db, "large_index",large,true,false); - - RunInsertTest("batch insert (small, no index)", db, "small_bulk",small,false,true); - RunInsertTest("batch insert (medium, no index)", db, "medium_bulk",medium,false,true); - RunInsertTest("batch insert (large, no index)", db, "large_bulk",large,false,true); - - Document fonespec = new Document().Append("x",perTrial/2); - RunFindTest("find_one (small, no index)", db, "small_none",fonespec,false); - RunFindTest("find_one (medium, no index)", db, "medium_none",fonespec,false); - RunFindTest("find_one (large, no index)", db, "large_none",fonespec,false); - - RunFindTest("find_one (small, indexed)", db, "small_index",fonespec,false); - RunFindTest("find_one (medium, indexed)", db, "medium_index",fonespec,false); - RunFindTest("find_one (large, indexed)", db, "large_index",fonespec,false); - - RunFindTest("find (small, no index)", db, "small_none",fonespec,true); - RunFindTest("find (medium, no index)", db, "medium_none",fonespec,true); - RunFindTest("find (large, no index)", db, "large_none",fonespec,true); - - RunFindTest("find (small, indexed)", db, "small_index",fonespec,true); - RunFindTest("find (medium, indexed)", db, "medium_index",fonespec,true); - RunFindTest("find (large, indexed)", db, "large_index",fonespec,true); - - Document findRange = new Document().Append("x",new Document().Append("$gt",perTrial/2).Append("$lt", perTrial/2 + batchSize)); - RunFindTest("find range (small, indexed)", db, "small_index",findRange,true); - RunFindTest("find range (medium, indexed)", db, "medium_index",findRange,true); - RunFindTest("find range (large, indexed)", db, "large_index",findRange,true); - - System.Console.WriteLine("Press any key to continue..."); - System.Console.ReadKey(); - } - - static void SetupDocuments(){ - medium.Append("integer", (int) 5); - medium.Append("number", 5.05); - medium.Append("boolean", false); - medium.Append("array", new String[]{"test","benchmark"}); - - large.Append("base_url", "http://www.example.com/test-me"); - large.Append("total_word_count", (int)6743); - large.Append("access_time", DateTime.UtcNow); - large.Append("meta_tags", new Document() - .Append("description", "i am a long description string") - .Append("author", "Holly Man") - .Append("dynamically_created_meta_tag", "who know\n what")); - large.Append("page_structure", new 
Document().Append("counted_tags", 3450) - .Append("no_of_js_attached", (int)10) - .Append("no_of_images", (int)6)); - string[] words = new string[]{"10gen","web","open","source","application","paas", - "platform-as-a-service","technology","helps", - "developers","focus","building","mongodb","mongo"}; - string[] harvestedWords = new string[words.Length * 20]; - for(int i = 0; i < words.Length * 20; i++){ - harvestedWords[i] = words[i % words.Length]; - } - large.Append("harvested_words", harvestedWords); - } -#region Insert Tests - static void RunInsertTest(string name, Database db, string col, Document doc, bool index, bool bulk){ - TimeSpan lowest = TimeSpan.MaxValue; - for(int i = 0; i < trials; i++){ - SetupInsert(db,"col",index); - TimeSpan ret = TimeInsert(db, col,doc, bulk); - if(ret < lowest) lowest = ret; - } - int opsSec = (int)(perTrial/lowest.TotalSeconds); - Console.Out.WriteLine(String.Format("{0}{1} {2}", name + new string('.', 55 - name.Length), opsSec, lowest)); - } - - static void SetupInsert(Database db, string col, bool index){ - try{ - db.MetaData.DropCollection(col); - if(index){ - Document idx = new Document().Append("x", IndexOrder.Ascending); - db[col].MetaData.CreateIndex(idx,false); - } - }catch(MongoCommandException){ - //swallow for now. - } - } - - static TimeSpan TimeInsert(Database db, string col, Document doc, bool bulk){ - DateTime start = DateTime.Now; - if(bulk){ - DoBulkInsert(db,col,doc, batchSize); - }else{ - DoInsert(db,col,doc); - } - DateTime stop = DateTime.Now; - TimeSpan t = stop - start; - return t; - } - - static void DoInsert(Database db, string col, Document doc){ - for(int i = 0; i < perTrial; i++){ - Document ins = new Document(); - doc.CopyTo(ins); - ins.Append("x", i); - db[col].Insert(ins); - } - } - - static void DoBulkInsert(Database db, string col, Document doc, int size){ - for(int i = 0; i < perTrial / size; i++){ - Document[] docs = new Document[size]; - for(int f = 0; f < docs.Length; f++){ - Document ins = new Document(); - doc.CopyTo(ins); - docs[f] = ins; - } - db[col].Insert(docs); - } - } -#endregion - -#region Encode Tests - static void RunEncodeTest(string name, Document doc){ - TimeSpan lowest = TimeSpan.MaxValue; - for(int i = 0; i < trials; i++){ - TimeSpan ret = TimeEncode(doc); - if(ret < lowest) lowest = ret; - } - int opsSec = (int)(perTrial/lowest.TotalSeconds); - Console.Out.WriteLine(String.Format("{0}{1} {2}", name + new string('.', 55 - name.Length), opsSec, lowest)); - } - - static TimeSpan TimeEncode(Document doc){ - DateTime start = DateTime.Now; - DoEncode(doc); - DateTime stop = DateTime.Now; - TimeSpan t = stop - start; - return t; - } - - static void DoEncode(Document doc){ - MemoryStream ms = new MemoryStream(); - for(int i = 0; i < perTrial; i++){ - BsonWriter writer = new BsonWriter(ms); - writer.Write(doc); - ms.Seek(0,SeekOrigin.Begin); - } - } -#endregion - - static void RunDecodeTest(string name, Document doc){ - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - writer.Write(doc); - - byte[] buff = ms.ToArray(); - - TimeSpan lowest = TimeSpan.MaxValue; - for(int i = 0; i < trials; i++){ - TimeSpan ret = TimeDecode(buff); - if(ret < lowest) lowest = ret; - } - int opsSec = (int)(perTrial/lowest.TotalSeconds); - Console.Out.WriteLine(String.Format("{0}{1} {2}", name + new string('.', 55 - name.Length), opsSec, lowest)); - } - - static TimeSpan TimeDecode(byte[] doc){ - DateTime start = DateTime.Now; - DoDecode(doc); - DateTime stop = DateTime.Now; - TimeSpan t = stop - 
start; - return t; - } - - static void DoDecode(byte[] buff){ - MemoryStream ms = new MemoryStream(buff); - for(int i = 0; i < perTrial; i++){ - BsonReader reader = new BsonReader(ms); - reader.Read(); - ms.Seek(0,SeekOrigin.Begin); - } - } - - #region Find Tests - static void RunFindTest(string name, Database db, string col, Document spec, bool range){ - TimeSpan lowest = TimeSpan.MaxValue; - for(int i = 0; i < trials; i++){ - TimeSpan ret = TimeFind(db, col, spec, range); - if(ret < lowest) lowest = ret; - } - int opsSec = (int)(perTrial/lowest.TotalSeconds); - Console.Out.WriteLine(String.Format("{0}{1} {2}", name + new string('.', 55 - name.Length), opsSec, lowest)); - } - - static TimeSpan TimeFind(Database db, string col,Document psec, bool range){ - DateTime start = DateTime.Now; - if(range){ - DoFindOne(db,col,psec); - }else{ - DoFind(db,col,psec); - } - DateTime stop = DateTime.Now; - TimeSpan t = stop - start; - return t; - } - - static void DoFindOne(Database db, string col, Document spec){ - for(int i = 0; i < perTrial; i++){ - db[col].FindOne(spec); - } - } - - static void DoFind(Database db, string col, Document spec){ - for(int i = 0; i < perTrial; i++){ - ICursor cur = db[col].Find(spec); - foreach(Document d in cur.Documents){ - } - } - } - #endregion - - } -} \ No newline at end of file diff --git a/MongoDB.Driver.Benchmark/MongoDB.Driver.Benchmark.csproj b/MongoDB.Driver.Benchmark/MongoDB.Driver.Benchmark.csproj deleted file mode 100644 index d1f6247d..00000000 --- a/MongoDB.Driver.Benchmark/MongoDB.Driver.Benchmark.csproj +++ /dev/null @@ -1,44 +0,0 @@ - - - - Debug - AnyCPU - 9.0.21022 - 2.0 - {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B} - Exe - MongoDB.Driver.Benchmark - MongoDB.Driver.Benchmark - v3.5 - - - true - full - false - bin\Debug - DEBUG - prompt - 4 - - - none - false - bin\Release - prompt - 4 - - - - - - - - - - - {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} - MongoDB.Driver - - - - \ No newline at end of file diff --git a/MongoDB.GridFS.Tests/AssemblyInfo.cs b/MongoDB.GridFS.Tests/AssemblyInfo.cs deleted file mode 100644 index a393bb35..00000000 --- a/MongoDB.GridFS.Tests/AssemblyInfo.cs +++ /dev/null @@ -1,26 +0,0 @@ -using System.Reflection; -using System.Runtime.CompilerServices; - -// Information about this assembly is defined by the following attributes. -// Change them to the values specific to your project. - -[assembly: AssemblyTitle("MongoDB.GridFS.Tests")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("")] -[assembly: AssemblyCopyright("")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// The assembly version has the format "{Major}.{Minor}.{Build}.{Revision}". -// The form "{Major}.{Minor}.*" will automatically update the build and revision, -// and "{Major}.{Minor}.{Build}.*" will update just the revision. - -[assembly: AssemblyVersion("1.0.*")] - -// The following attributes are used to specify the signing key for the assembly, -// if desired. See the Mono documentation for more information about signing. 
- -[assembly: AssemblyDelaySign(false)] -[assembly: AssemblyKeyFile("")] diff --git a/MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.csproj b/MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.csproj deleted file mode 100644 index b8d868c9..00000000 --- a/MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.csproj +++ /dev/null @@ -1,62 +0,0 @@ - - - - Debug - AnyCPU - 9.0.21022 - 2.0 - {0C293FE9-F670-4FEF-A60F-20F8C978B1CD} - Library - MongoDB.GridFS.Tests - v3.5 - MongoDB.GridFS.Tests - - - true - full - false - bin\Debug - DEBUG - prompt - 4 - false - - - none - false - bin\Release - prompt - 4 - false - - - - - False - ..\redist\nunit.framework.dll - - - - - - - - - - - - - {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02} - MongoDB.Driver.Tests - - - {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} - MongoDB.Driver - - - {B42DBBF9-0A1F-4749-9787-013BF8D8F435} - MongoDB.GridFS - - - - \ No newline at end of file diff --git a/MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.dll.config b/MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.dll.config deleted file mode 100644 index a24d6958..00000000 --- a/MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.dll.config +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/MongoDB.GridFS.Tests/Properties/AssemblyInfo.cs b/MongoDB.GridFS.Tests/Properties/AssemblyInfo.cs deleted file mode 100644 index 24d8cdcc..00000000 --- a/MongoDB.GridFS.Tests/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,36 +0,0 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("MongoDB.Driver.GridFS.Tests")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("Microsoft")] -[assembly: AssemblyProduct("MongoDB.Driver.GridFS.Tests")] -[assembly: AssemblyCopyright("Copyright © Microsoft 2009")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. If you need to access a type in this assembly from -// COM, set the ComVisible attribute to true on that type. 
-[assembly: ComVisible(false)] - -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("94ed0246-0a62-4e43-94fb-bd52a9efc901")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/MongoDB.GridFS/GridChunk.cs b/MongoDB.GridFS/GridChunk.cs deleted file mode 100644 index b4397a6d..00000000 --- a/MongoDB.GridFS/GridChunk.cs +++ /dev/null @@ -1,68 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Text; -using MongoDB.Driver; - -namespace MongoDB.Driver.GridFS -{ - public class GridChunk : IComparable //TODO Change back to a struct - { - public GridChunk(object filesId, int n, byte[] data){ -// OidGenerator oidGenerator = new OidGenerator(); -// this.id = oidGenerator.Generate(); - this.filesId = filesId; - this.n = n; - this.data = new Binary(data); - } - - public GridChunk(Document doc) - { - this.id = (Oid)doc["_id"]; - this.filesId = (Object)doc["files_id"]; - this.n = Convert.ToInt32(doc["n"]); - this.data = (Binary)doc["data"]; - } - - // object id of the chunk in the _chunks collection - private Object id; - public Object Id{ - get { return this.id; } - set { this.id = value; } - } - // id value of the owning {{files}} collection entry - private Object filesId; - public Object FilesId{ - get { return this.filesId; } - set { this.filesId = value; } - } - - //Chunk number - private int n; - public int N{ - get { return this.n; } - set { this.n = value; } - } - - private Binary data; - public Binary Data{ - get { return this.data; } - } - - //Allow sorting by chunk number - public int CompareTo(Object obj){ - GridChunk chunk = (GridChunk)obj; - return this.n.CompareTo(chunk.N); - } - - public Document ToDocument() - { - Document doc = new Document(); - if(this.id != null) doc["_id"] = this.id; - doc["files_id"] = this.filesId; - doc["n"] = this.n; - doc["data"] = this.data; - return doc; - } - - } -} diff --git a/MongoDB.GridFS/GridException.cs b/MongoDB.GridFS/GridException.cs deleted file mode 100644 index 64d89e14..00000000 --- a/MongoDB.GridFS/GridException.cs +++ /dev/null @@ -1,19 +0,0 @@ -using System; - -namespace MongoDB.GridFS -{ - public class MongoGridFSException : Exception - { - private string filename; - public string Filename - { - get { return filename; } - } - - public MongoGridFSException(string message, string filename, Exception inner) - : base(message, inner) - { - this.filename = filename; - } - } -} diff --git a/MongoDB.GridFS/GridFileStream.cs b/MongoDB.GridFS/GridFileStream.cs deleted file mode 100644 index 2fe398df..00000000 --- a/MongoDB.GridFS/GridFileStream.cs +++ /dev/null @@ -1,389 +0,0 @@ -using System; -using System.Collections; -using System.Collections.Generic; -using System.IO; - -using MongoDB.Driver; - -namespace MongoDB.GridFS -{ - /// - /// Stream for reading and writing to a file in GridFS. - /// - /// - /// When using the stream for random io it is possible to produce chunks in the begining and middle of the - /// file that are not full size followed by other chunks that are full size. This only affects the md5 sum - /// that is calculated on the file on close. 
Because of this do not rely on the md5 sum of a file when doing - /// random io. Writing to the stream sequentially works fine and will produce a consistent md5. - /// - public class GridFileStream : Stream - { - - private IMongoCollection files; - private IMongoCollection chunks; - private Document chunk; - private bool chunkDirty; - private long chunkLower = -1; - private long chunkUpper = -1; - - private byte[] buffer; - private byte[] blankBuffer; - private int buffPosition; - private int highestBuffPosition; - private long highestPosWritten; - - - #region Properties - private GridFileInfo gridFileInfo; - public GridFileInfo GridFileInfo { - get { return gridFileInfo; } - set { gridFileInfo = value; } - } - - private bool canRead; - public override bool CanRead { - get { return canRead; } - } - - private bool canWrite; - public override bool CanWrite { - get { return canRead; } - } - - public override bool CanSeek { - get { return true; } - } - - public override long Length { - get { - return gridFileInfo.Length; - } - } - - private long position; - public override long Position { - get { - return position; - } - set { - this.Seek(value, SeekOrigin.Begin); - } - } - #endregion - - public GridFileStream(GridFileInfo gridfileinfo,IMongoCollection files, IMongoCollection chunks, FileAccess access){ - switch (access){ - case FileAccess.Read: - canRead = true; - break; - case FileAccess.ReadWrite: - canRead = true; - canWrite = true; - break; - case FileAccess.Write: - canWrite = true; - break; - } - this.gridFileInfo = gridfileinfo; - this.files = files; - this.chunks = chunks; - this.buffer = new byte[gridFileInfo.ChunkSize]; - this.blankBuffer = new byte[gridFileInfo.ChunkSize]; - this.highestPosWritten = this.gridFileInfo.Length; - this.MoveTo(0); - } - - /// - /// Reads data from the stream into the specified array. It will fill the array in starting at offset and - /// adding count bytes returning the number of bytes read from the stream. - /// - public override int Read(byte[] array, int offset, int count){ - int bytesLeftToRead = count; - int bytesRead = 0; - while(bytesLeftToRead > 0 && this.position < this.Length){ - int buffAvailable = buffer.Length - buffPosition; - int readCount = 0; - if(buffAvailable > bytesLeftToRead){ - readCount = bytesLeftToRead; - }else{ - readCount = buffAvailable; - } - if(readCount + position > highestPosWritten){ - //adjust readcount so that we don't read past the end of file. - readCount = readCount - (int)(readCount + position - highestPosWritten); - } - Array.Copy(buffer,buffPosition,array,offset,readCount); - buffPosition += readCount; - bytesLeftToRead -= readCount; - bytesRead += readCount; - offset += readCount; - MoveTo(position + readCount); - } - return bytesRead; - } - - private void ValidateReadState(byte[] array, int offset, int count){ - if (array == null){ - throw new ArgumentNullException("array", new Exception("array is null")); - } - else if (offset < 0){ - throw new ArgumentOutOfRangeException("offset", new Exception("offset is negative")); - } - else if (count < 0){ - throw new ArgumentOutOfRangeException("count", new Exception("count is negative")); - } - else if ((array.Length - offset) < count){ - throw new MongoGridFSException("Invalid count argument", gridFileInfo.FileName, null); - } - else if (!canRead){ - throw new MongoGridFSException("Reading this file is not supported", gridFileInfo.FileName, null); - } - } - - /// - /// Copies from the source array into the grid file. - /// - /// - /// A The source array to copy from. 
- /// - /// - /// A The offset within the source array. - /// - /// - /// A The number of bytes from within the source array to copy. - /// - public override void Write(byte[] array, int offset, int count){ - ValidateWriteState(array,offset,count); - - int bytesLeftToWrite = count; - while(bytesLeftToWrite > 0){ - int buffAvailable = buffer.Length - buffPosition; - int writeCount = 0; - if(buffAvailable > bytesLeftToWrite){ - writeCount = bytesLeftToWrite; - }else{ - writeCount = buffAvailable; - } - Array.Copy(array,offset,buffer,buffPosition,writeCount); - chunkDirty = true; - buffPosition += writeCount; - offset += writeCount; - bytesLeftToWrite -= writeCount; - MoveTo(position + writeCount); - highestPosWritten = Math.Max(highestPosWritten, position); - } - } - - private void ValidateWriteState(byte[] array, int offset, int count){ - if (array == null){ - throw new ArgumentNullException("array", new Exception("array is null")); - }else if (offset < 0){ - throw new ArgumentOutOfRangeException("offset", new Exception("offset is negative")); - }else if (count < 0){ - throw new ArgumentOutOfRangeException("count",new Exception("count is negative")); - }else if ((array.Length - offset) < count){ - throw new MongoGridFSException("Invalid count argument", gridFileInfo.FileName, null); - }else if (!canWrite){ - throw new System.NotSupportedException("Stream does not support writing."); - } - } - - - /// - /// Flushes any changes to current chunk to the database. It can be called in client code at any time or it - /// will automatically be called on Close() and when the stream position moves off the bounds of the current - /// chunk. - /// - public override void Flush(){ - if(chunkDirty == false) return; - //avoid a copy if possible. - if(highestBuffPosition == buffer.Length){ - chunk["data"] = new Binary(buffer); - }else{ - byte[] data = new byte[highestBuffPosition]; - Array.Copy(buffer,data,highestBuffPosition); - chunk["data"] = new Binary(data); - } - - - if(chunk.Contains("_id")){ - chunks.Update(chunk); - }else{ - chunks.Insert(chunk); - } - this.gridFileInfo.Length = highestPosWritten; - } - - /// - /// Seek to any location in the stream. Seeking past the end of the file is allowed. Any writes to that - /// location will cause the file to grow to that size. Any holes that may be created from the seek will - /// be zero filled on close. - /// - public override long Seek(long offset, SeekOrigin origin){ - if ((origin < SeekOrigin.Begin) || (origin > SeekOrigin.End)){ - throw new ArgumentException("Invalid Seek Origin"); - } - - switch (origin){ - case SeekOrigin.Begin: - if (offset < 0){ - throw new ArgumentException("Attempted seeking before the begining of the stream"); - }else{ - MoveTo(offset); - } - break; - case SeekOrigin.Current: - MoveTo(position + offset); - break; - case SeekOrigin.End: - if (offset <= 0){ - throw new ArgumentException("Attempted seeking after the end of the stream"); - } - MoveTo(this.Length - offset); - break; - } - return position; - } - - /// - /// Sets the length of this stream to the given value. 
- /// - /// - /// A - /// - public override void SetLength(long value){ - if(value < 0) throw new ArgumentOutOfRangeException("length"); - if(this.CanSeek == false || this.CanWrite == false) { - throw new NotSupportedException("The stream does not support both writing and seeking."); - } - - if(value < highestPosWritten) { - TruncateAfter(value); - }else{ - this.Seek(value, SeekOrigin.Begin); - } - chunkDirty = true; - this.gridFileInfo.Length = value; - highestPosWritten = value; - - } - - /// - /// Close the stream and flush any changes to the database. - /// - public override void Close(){ - this.Flush(); - this.gridFileInfo.Length = highestPosWritten; - EnsureNoHoles(); - string md5 = gridFileInfo.CalcMD5(); - gridFileInfo.Md5 = md5; - this.files.Update(gridFileInfo.ToDocument()); - base.Close(); - } - - /// - /// Moves the current position to the new position. If this causes a new chunk to need to be loaded it will take - /// care of flushing the buffer and loading a new chunk. - /// - /// - /// A designating where to go to. - /// - private void MoveTo(long position){ - this.position = position; - int chunkSize = this.gridFileInfo.ChunkSize; - bool chunkInRange = (chunk != null && position >= chunkLower && position < chunkUpper); - if(chunkInRange == false){ - if(chunk != null && chunkDirty){ - highestBuffPosition = Math.Max(highestBuffPosition, buffPosition); - this.Flush(); - } - int chunknum = (int)Math.Floor((double)(position / chunkSize)); - Array.Copy(blankBuffer,buffer,buffer.Length); - LoadOrCreateChunk(chunknum); - chunkDirty = false; - chunkLower = chunknum * chunkSize; - chunkUpper = chunkLower + chunkSize; - } - buffPosition = (int)(position % chunkSize); - highestBuffPosition = Math.Max(highestBuffPosition, buffPosition); - - } - - /// - /// Loads a chunk from the chunks collection if it exists. Otherwise it creates a blank chunk Document. - /// - /// - private void LoadOrCreateChunk(int num){ - Object fid = this.GridFileInfo.Id; - Document spec = new Document().Append("files_id", fid).Append("n",num); - chunk = this.chunks.FindOne(spec); - if(chunk == null) { - chunk = spec; - highestBuffPosition = 0; - }else{ - Binary b = (Binary)chunk["data"]; - highestBuffPosition = b.Bytes.Length; - Array.Copy(b.Bytes,buffer, highestBuffPosition); - } - } - - - /// - /// Deletes all chunks after the specified position and clears out any extra bytes if the position doesn't fall on - /// a chunk boundry. - /// - private void TruncateAfter(long value){ - int chunknum = CalcChunkNum(value); - Document spec = new Document().Append("files_id", this.gridFileInfo.Id) - .Append("n",new Document().Append("$gt",chunknum)); - this.chunks.Delete(spec); - this.MoveTo(value ); - Array.Copy(blankBuffer,0,buffer,buffPosition, buffer.Length - buffPosition); - highestBuffPosition = buffPosition; - } - - private int CalcChunkNum(long position){ - int chunkSize = this.gridFileInfo.ChunkSize; - return (int)Math.Floor((double)(position / chunkSize)); - } - - /// - /// Makes sure that at least a skelton chunk exists for all numbers. If not the MD5 calculation will fail on a sparse file. 
- /// - private void EnsureNoHoles(){ - int highChunk = CalcChunkNum(this.GridFileInfo.Length); - Document query = new Document().Append("files_id", this.GridFileInfo.Id) - .Append("n", new Document() - .Append("$lte",highChunk)); - Document sort = new Document().Append("n",1); - Document fields = new Document().Append("_id", 1).Append("n",1); - - Binary data = new Binary(this.blankBuffer); - int i = 0; - using (ICursor cur = chunks.Find(new Document().Append("query",query).Append("sort",sort),0,0,fields)){ - foreach(Document doc in cur.Documents){ - int n = Convert.ToInt32(doc["n"]); - if(i < n){ - while(i < n){ - chunks.Insert(new Document().Append("files_id", this.gridFileInfo.Id) - .Append("n", i) - .Append("data", data) - ); - i++; - } - }else{ - i++; - } - } - } - - } - - protected override void Dispose(bool disposing){ - this.canRead = false; - this.canWrite = false; - - base.Dispose(disposing); - } - } -} \ No newline at end of file diff --git a/MongoDB.GridFS/MongoDB.GridFS.csproj b/MongoDB.GridFS/MongoDB.GridFS.csproj deleted file mode 100644 index 83089bea..00000000 --- a/MongoDB.GridFS/MongoDB.GridFS.csproj +++ /dev/null @@ -1,49 +0,0 @@ - - - - Debug - AnyCPU - 9.0.21022 - 2.0 - {B42DBBF9-0A1F-4749-9787-013BF8D8F435} - Library - MongoDB.GridFS - v2.0 - MongoDB.GridFS - - - true - full - false - bin\Debug - DEBUG - prompt - 4 - false - - - none - false - bin\Release - prompt - 4 - false - - - - - - - - - - - - - - {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} - MongoDB.Driver - - - - \ No newline at end of file diff --git a/MongoDB.GridFS/Properties/AssemblyInfo.cs b/MongoDB.GridFS/Properties/AssemblyInfo.cs deleted file mode 100644 index 7e1330f5..00000000 --- a/MongoDB.GridFS/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,36 +0,0 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("MongoDB.Driver.GridFS")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("Microsoft")] -[assembly: AssemblyProduct("MongoDB.Driver.GridFS")] -[assembly: AssemblyCopyright("Copyright © Microsoft 2009")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. If you need to access a type in this assembly from -// COM, set the ComVisible attribute to true on that type. -[assembly: ComVisible(false)] - -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("05f4c345-88fc-4e22-87c8-4e1292da6faf")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/MongoDB.Linq.Tests/App.config b/MongoDB.Linq.Tests/App.config deleted file mode 100644 index 1680d110..00000000 --- a/MongoDB.Linq.Tests/App.config +++ /dev/null @@ -1,10 +0,0 @@ - - - -
- - - - - - \ No newline at end of file diff --git a/MongoDB.Linq.Tests/AppSettingsFactory.cs b/MongoDB.Linq.Tests/AppSettingsFactory.cs deleted file mode 100644 index 932c0fe3..00000000 --- a/MongoDB.Linq.Tests/AppSettingsFactory.cs +++ /dev/null @@ -1,23 +0,0 @@ -using System.Configuration; -using MongoDB.Driver; -using MongoDB.Driver.Connections; - -namespace MongoDB.Linq.Tests { - public static class AppSettingsFactory { - - public static string Host { get { return ConfigurationManager.AppSettings["mongo.host"]; } } - public static int Port { get { return int.Parse(ConfigurationManager.AppSettings["mongo.port"]); } } - - public static Mongo CreateMongo() { - var builder = new MongoConnectionStringBuilder(); - builder.AddServer(Host,Port); - return new Mongo(builder.ToString()); - } - - public static Connection CreateConnection() { - var builder = new MongoConnectionStringBuilder(); - builder.AddServer(Host, Port); - return ConnectionFactory.GetConnection(builder.ToString()); - } - } -} \ No newline at end of file diff --git a/MongoDB.Linq.Tests/MongoDB.Linq.Tests.csproj b/MongoDB.Linq.Tests/MongoDB.Linq.Tests.csproj deleted file mode 100644 index a3f878d7..00000000 --- a/MongoDB.Linq.Tests/MongoDB.Linq.Tests.csproj +++ /dev/null @@ -1,85 +0,0 @@ - - - - Debug - AnyCPU - 9.0.30729 - 2.0 - {870FE8E1-3461-4C79-BF25-9C35E41BF582} - Library - Properties - MongoDB.Linq.Tests - MongoDB.Linq.Tests - v3.5 - 512 - - - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - - - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - - - - False - ..\redist\Moq.dll - - - False - ..\redist\nunit.framework.dll - - - - - 3.5 - - - 3.5 - - - 3.5 - - - - - - - - - - - - - - - - - - {2E48891E-72F9-445D-9A5A-DBA787BFFE9E} - MongoDB.Linq - - - {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} - MongoDB.Driver - - - - - \ No newline at end of file diff --git a/MongoDB.Linq.Tests/Properties/AssemblyInfo.cs b/MongoDB.Linq.Tests/Properties/AssemblyInfo.cs deleted file mode 100644 index f4d382f8..00000000 --- a/MongoDB.Linq.Tests/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,36 +0,0 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("MongoDB.Linq.Tests")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("MongoDB.Linq.Tests")] -[assembly: AssemblyCopyright("Copyright © 2009")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. If you need to access a type in this assembly from -// COM, set the ComVisible attribute to true on that type. 
-[assembly: ComVisible(false)] - -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("c3a2a135-887c-47db-a891-18924f9cf068")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] \ No newline at end of file diff --git a/MongoDB.Linq.Tests/TestExpressions.cs b/MongoDB.Linq.Tests/TestExpressions.cs deleted file mode 100644 index 2cedd7e5..00000000 --- a/MongoDB.Linq.Tests/TestExpressions.cs +++ /dev/null @@ -1,64 +0,0 @@ -using System; -using System.Linq.Expressions; -using System.Reflection; -using NUnit.Framework; - -namespace MongoDB.Linq.Tests { - // ReSharper disable InconsistentNaming - [TestFixture] - public class TestExpressions { - [Test] - public void Getting_at_member_expression_values() { - var foo = new { Bar = "abc" }; - Expression> expression = () => foo.Bar; - var memberExpression = expression.Body as MemberExpression; - switch (memberExpression.Member.MemberType) { - case MemberTypes.Property: - var propertyInfo = (PropertyInfo)memberExpression.Member; - var innerMember = (MemberExpression)memberExpression.Expression; - var fieldInfo = (FieldInfo)innerMember.Member; - var obj = fieldInfo.GetValue(((ConstantExpression)innerMember.Expression).Value); - Assert.AreEqual("abc", propertyInfo.GetValue(obj, null)); - break; - default: - Assert.Fail(); - break; - } - } - - [Test] - public void Getting_field_from_closure() { - string key = "xyz"; - Expression> expression = () => key; - var memberExpression = expression.Body as MemberExpression; - switch (memberExpression.Member.MemberType) { - case MemberTypes.Field: - var fieldInfo = (FieldInfo)memberExpression.Member; - Assert.AreEqual("xyz", fieldInfo.GetValue(((ConstantExpression)memberExpression.Expression).Value)); - break; - default: - Assert.Fail(); - break; - } - } - - [Test] - public void Evaluating_a_MethodCallExpression_with_known_return_type() { - Expression> expression = () => DateTime.Parse("2009/10/10"); - var methodCallExpression = expression.Body as MethodCallExpression; - Expression> lambda = Expression.Lambda>(methodCallExpression); - var value = lambda.Compile()(); - Assert.AreEqual(DateTime.Parse("2009/10/10"), value); - } - - [Test] - public void Evaluating_a_MethodCallExpression_with_unknown_return_type() { - Expression> expression = () => DateTime.Parse("2009/10/10"); - var methodCallExpression = expression.Body as MethodCallExpression; - var lambda = Expression.Lambda(methodCallExpression); - var value = lambda.Compile().DynamicInvoke(); - Assert.AreEqual(DateTime.Parse("2009/10/10"), value); - } - } - // ReSharper restore InconsistentNaming -} diff --git a/MongoDB.Linq.Tests/TestMongoDocumentQuerySyntax.cs b/MongoDB.Linq.Tests/TestMongoDocumentQuerySyntax.cs deleted file mode 100644 index 4d953a8d..00000000 --- a/MongoDB.Linq.Tests/TestMongoDocumentQuerySyntax.cs +++ /dev/null @@ -1,318 +0,0 @@ -using System; -using System.Diagnostics; -using System.Linq; -using MongoDB.Driver; -using Moq; -using NUnit.Framework; - -namespace MongoDB.Linq.Tests { - // ReSharper disable InconsistentNaming - [TestFixture] - public class TestMongoDocumentQuerySyntax { - - private IMongoQuery queryable; - private Mock collectionMock; - 
private Mock cursorMock; - - [SetUp] - public void Setup() { - Debug.WriteLine("initializing queryable"); - collectionMock = new Mock(); - cursorMock = new Mock(); - collectionMock.Setup(c => c.Find(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).Returns(cursorMock.Object); - queryable = new MongoQuery(new MongoQueryProvider(collectionMock.Object)); - } - - [Test] - public void Can_use_in_query() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo").In("bar", "baz") select d); - Assert.AreEqual( - new Document().Append("foo", new Document().Append("$in", new[]{ "bar", "baz" })), - q.Query); - } - - [Test] - public void Can_use_in_query_with_array_ref() { - var a = new[] { "bar", "baz" }; - var q = (IMongoQuery)(from d in queryable where d.Key("foo").In(a) select d); - Assert.AreEqual( - new Document().Append("foo", new Document().Append("$in", new[] { "bar", "baz" })), - q.Query); - } - - [Test] - public void Can_use_not_in_query() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo").NotIn("bar", "baz") select d); - Assert.AreEqual( - new Document().Append("foo", new Document().Append("$nin", new[] { "bar", "baz" })), - q.Query); - } - - [Test] - public void Can_use_Equals() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo").Equals("bar") select d); - Assert.AreEqual(new Document().Append("foo", "bar").ToString(), q.Query.ToString()); - } - - #region string operator overloads - [Test] - public void Can_use_equality_op_on_string() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") == "bar" select d); - Assert.AreEqual(new Document().Append("foo", "bar").ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_inequality_op_on_string() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") != "bar" select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$ne","bar")).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_equality_op_on_string_reversed() { - var q = (IMongoQuery)(from d in queryable where "bar" == d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", "bar").ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_inequality_op_on_string_reversed() { - var q = (IMongoQuery)(from d in queryable where "bar" != d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$ne","bar")).ToString(), q.Query.ToString()); - } - #endregion - - #region int operator overloads - [Test] - public void Can_use_equality_op_on_int() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") == 10 select d); - Assert.AreEqual(new Document().Append("foo", 10).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_inequality_op_on_int() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") != 10 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$ne", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_op_on_int() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") > 10 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gt", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_or_equal_op_on_int() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") >= 10 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gte", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_op_on_int() { 
- var q = (IMongoQuery)(from d in queryable where d.Key("foo") < 10 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lt", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_or_equal_op_on_int() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") <= 10 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lte", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_equality_op_on_int_reversed() { - var q = (IMongoQuery)(from d in queryable where 10 == d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", 10).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_inequality_op_on_int_reversed() { - var q = (IMongoQuery)(from d in queryable where 10 != d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$ne", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_op_on_int_reversed() { - var q = (IMongoQuery)(from d in queryable where 10 > d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lt", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_or_equal_op_on_int_reversed() { - var q = (IMongoQuery)(from d in queryable where 10 >= d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lte", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_op_on_int_reversed() { - var q = (IMongoQuery)(from d in queryable where 10 < d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gt", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_or_equal_op_on_int_reversed() { - var q = (IMongoQuery)(from d in queryable where 10 <= d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gte", 10)).ToString(), q.Query.ToString()); - } - #endregion - - #region double operator overloads - [Test] - public void Can_use_equality_op_on_double() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") == 10.1 select d); - Assert.AreEqual(new Document().Append("foo", 10.1).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_inequality_op_on_double() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") != 10.1 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$ne", 10.1)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_op_on_double() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") > 10.1 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gt", 10.1)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_or_equal_op_on_double() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") >= 10.1 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gte", 10.1)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_op_on_double() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") < 10.1 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lt", 10.1)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_or_equal_op_on_double() { - var q = (IMongoQuery)(from d in queryable where d.Key("foo") <= 10.1 select d); - 
Assert.AreEqual(new Document().Append("foo", new Document().Append("$lte", 10.1)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_equality_op_on_double_reversed() { - var q = (IMongoQuery)(from d in queryable where 10.1 == d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", 10.1).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_inequality_op_on_double_reversed() { - var q = (IMongoQuery)(from d in queryable where 10.1 != d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$ne", 10.1)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_op_on_double_reversed() { - var q = (IMongoQuery)(from d in queryable where 10.1 > d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lt", 10.1)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_or_equal_op_on_double_reversed() { - var q = (IMongoQuery)(from d in queryable where 10.1 >= d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lte", 10.1)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_op_on_double_reversed() { - var q = (IMongoQuery)(from d in queryable where 10.1 < d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gt", 10.1)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_or_equal_op_on_double_reversed() { - var q = (IMongoQuery)(from d in queryable where 10.1 <= d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gte", 10.1)).ToString(), q.Query.ToString()); - } - #endregion - - #region double operator overloads - [Test] - public void Can_use_equality_op_on_DateTime() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where d.Key("foo") == dt select d); - Assert.AreEqual(new Document().Append("foo",dt).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_inequality_op_on_DateTime() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where d.Key("foo") != dt select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$ne", dt)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_op_on_DateTime() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where d.Key("foo") > dt select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gt", dt)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_or_equal_op_on_DateTime() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where d.Key("foo") >= dt select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gte", dt)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_op_on_DateTime() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where d.Key("foo") < dt select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lt", dt)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_or_equal_op_on_DateTime() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where 
d.Key("foo") <= dt select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lte", dt)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_equality_op_on_DateTime_reversed() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where dt == d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", dt).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_inequality_op_on_DateTime_reversed() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where dt != d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$ne", dt)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_op_on_DateTime_reversed() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where dt > d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lt", dt)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_or_equal_op_on_DateTime_reversed() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where dt >= d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lte", dt)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_op_on_DateTime_reversed() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where dt < d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gt", dt)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_or_equal_op_on_DateTime_reversed() { - var dt = DateTime.Parse("2009-10-10T07:00:00.0000000Z"); - var q = (IMongoQuery)(from d in queryable where dt <= d.Key("foo") select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gte", dt)).ToString(), q.Query.ToString()); - } - #endregion - } - // ReSharper restore InconsistentNaming -} diff --git a/MongoDB.Linq.Tests/TestQueryExecution.cs b/MongoDB.Linq.Tests/TestQueryExecution.cs deleted file mode 100644 index e2bcf7f8..00000000 --- a/MongoDB.Linq.Tests/TestQueryExecution.cs +++ /dev/null @@ -1,32 +0,0 @@ -using System.Diagnostics; -using System.Linq; -using MongoDB.Driver; -using NUnit.Framework; - -namespace MongoDB.Linq.Tests { - // ReSharper disable InconsistentNaming - [TestFixture] - public class TestQueryExecution { - private Mongo mongo; - - [TestFixtureSetUp] - public void GlobalSetup() { - Debug.WriteLine("initiallizing connection"); - mongo = AppSettingsFactory.CreateMongo(); - mongo.Connect(); - } - - [TestFixtureTearDown] - public void GlobalTeardown() { - mongo.Disconnect(); - } - - [Test] - public void Can_build_simple_query() { - var c = mongo["foo"]["bar"]; - var q = from d in c.AsQueryable() where (string)d["name"] == "bob" select d; - var l = q.ToList(); - } - } - // ReSharper restore InconsistentNaming -} diff --git a/MongoDB.Linq.Tests/TestQueryParsing.cs b/MongoDB.Linq.Tests/TestQueryParsing.cs deleted file mode 100644 index 13fcf74a..00000000 --- a/MongoDB.Linq.Tests/TestQueryParsing.cs +++ /dev/null @@ -1,225 +0,0 @@ -using System; -using System.Diagnostics; -using System.Linq; -using MongoDB.Driver; -using Moq; -using NUnit.Framework; - -namespace MongoDB.Linq.Tests { - // ReSharper disable InconsistentNaming - 
[TestFixture] - public class TestQueryParsing { - - private IMongoQuery queryable; - private Mock collectionMock; - private Mock cursorMock; - - [SetUp] - public void Setup() { - Debug.WriteLine("initializing queryable"); - collectionMock = new Mock(); - cursorMock = new Mock(); - collectionMock.Setup(c => c.Find(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).Returns(cursorMock.Object); - queryable = new MongoQuery(new MongoQueryProvider(collectionMock.Object)); - } - - [Test] - public void No_where_produces_empty_Query() { - var q = (IMongoQuery)(from d in queryable select d); - Assert.IsNull(q.Query); - } - - [Test] - public void Can_call_ToList_on_query() { - (from d in queryable select d).ToList(); - collectionMock.Verify(c => c.Find(null, 0, 0, null)); - } - - [Test] - public void Can_call_AsEnumerable_on_query() { - var q = (from d in queryable select d).AsEnumerable(); - var enumerator = q.GetEnumerator(); - var first = enumerator.Current; - collectionMock.Verify(c => c.Find(null, 0, 0, null)); - } - - [Test] - public void No_skip_produces_zero_skip() { - var q = (IMongoQuery)(from d in queryable select d); - Assert.AreEqual(0, q.Skip); - } - - [Test] - public void Skip_5_produces_skip_5() { - var q = (IMongoQuery)(from d in queryable select d).Skip(5); - Assert.AreEqual(5, q.Skip); - } - - [Test] - public void No_take_produces_zero_limit() { - var q = (IMongoQuery)(from d in queryable select d); - Assert.AreEqual(0, q.Limit); - } - - [Test] - public void Take_5_produces_limit_5() { - var q = (IMongoQuery)(from d in queryable select d).Take(5); - Assert.AreEqual(5, q.Limit); - } - - [Test] - public void Can_chain_Take_and_Skip() { - var q = (IMongoQuery)(from d in queryable select d).Take(5).Skip(10); - Assert.AreEqual(5, q.Limit); - Assert.AreEqual(10, q.Skip); - } - - [Test] - public void FirstOrDefault_produces_limit_1() { - (from d in queryable select d).FirstOrDefault(); - collectionMock.Verify(c => c.Find(null, 1, 0, null)); - } - - [Test] - public void First_on_empty_sequence_throws() { - try { - (from d in queryable select d).First(); - Assert.Fail("First didn't throw"); - } catch (InvalidOperationException e) { - Assert.AreEqual("Sequence contains no elements", e.Message); - } - collectionMock.Verify(c => c.Find(null, 1, 0, null)); - } - - [Test] - public void Can_use_equality_where_clause_with_equals() { - var q = (IMongoQuery)(from d in queryable where Equals(d["foo"], "bar") select d); - Assert.AreEqual(new Document().Append("foo","bar").ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_equality_where_clause_with_equals_method_on_Document_indexer() { - var q = (IMongoQuery)(from d in queryable where d["foo"].Equals("bar") select d); - Assert.AreEqual(new Document().Append("foo", "bar").ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_equality_where_clause_with_equals_method_on_value() { - var q = (IMongoQuery)(from d in queryable where "bar".Equals(d["foo"]) select d); - Assert.AreEqual(new Document().Append("foo", "bar").ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_equality_where_clause_with_left_cast_to_string() { - var q = (IMongoQuery)(from d in queryable where (string)d["foo"] == "bar" select d); - Assert.AreEqual(new Document().Append("foo", "bar").ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_equality_where_clause_with_right_cast_to_object() { - var q = (IMongoQuery)(from d in queryable where (string)d["foo"] == "bar" select d); - Assert.AreEqual(new 
Document().Append("foo", "bar").ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_equality_where_clause_with_left_and_right_reversed() { - var q = (IMongoQuery)(from d in queryable where "bar" == (string)d["foo"] select d); - Assert.AreEqual(new Document().Append("foo", "bar").ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_equality_where_clause_with_key_as_variable() { - string key = "foo"; - var q = (IMongoQuery)(from d in queryable where (string)d[key] == "bar" select d); - Assert.AreEqual(new Document().Append("foo", "bar").ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_equality_where_clause_with_value_as_variable() { - string value = "bar"; - var q = (IMongoQuery)(from d in queryable where (string)d["foo"] == value select d); - Assert.AreEqual(new Document().Append("foo","bar").ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_inequality() { - var q = (IMongoQuery)(from d in queryable where (string)d["foo"] != "bar" select d); - Assert.AreEqual(new Document().Append("foo",new Document().Append("$ne","bar")).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_inequality_reversed() { - var q = (IMongoQuery)(from d in queryable where "bar" != (string)d["foo"] select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$ne", "bar")).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than() { - var q = (IMongoQuery)(from d in queryable where (int)d["foo"] > 10 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gt", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_reversed() { - var q = (IMongoQuery)(from d in queryable where 10 > (int)d["foo"] select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lt", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_greater_than_or_equal() { - var q = (IMongoQuery)(from d in queryable where (int)d["foo"] >= 10 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$gte", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than() { - var q = (IMongoQuery)(from d in queryable where (int)d["foo"] < 10 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lt", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_use_less_than_or_equal() { - var q = (IMongoQuery)(from d in queryable where (int)d["foo"] <= 10 select d); - Assert.AreEqual(new Document().Append("foo", new Document().Append("$lte", 10)).ToString(), q.Query.ToString()); - } - - [Test] - public void Can_do_and_queries() { - var q = (IMongoQuery)(from d in queryable where (int)d["foo"] <= 10 && (string)d["bar"] == "zoop" select d); - Assert.AreEqual( - new Document() - .Append("foo", new Document().Append("$lte", 10)) - .Append("bar","zoop"), - q.Query); - } - - [Test] - public void Can_do_and_queries_on_same_key(){ - var q = (IMongoQuery)(from d in queryable where (int)d["foo"] < 10 && (int)d["foo"] > 5 select d); - Assert.AreEqual( - new Document().Append("foo", new Document().Append("$lt", 10).Append("$gt",5)), - q.Query); - } - - [Test] - public void Can_compose_queries() { - // Note (sdether): this passes without explicit AND support, which is a bit scary - var q1 = from d in queryable where (int)d["foo"] <= 10 select d; - var q2 = (IMongoQuery)(from d in q1 where (string)d["bar"] == "zoop" select d); - Assert.AreEqual( - new 
Document() - .Append("foo", new Document().Append("$lte", 10)) - .Append("bar", "zoop"), - q2.Query); - } - - [Test] - public void Can_use_dot_notation_for_queries() { - // Note (sdether): dot.notation in document is a bit of a perversion, since it's not legal - // in a document to be saved. So this syntax may break, if Document becomes more strict - var q = (IMongoQuery)(from d in queryable where (int)d["foo.bar"] == 10 select d); - Assert.AreEqual(new Document().Append("foo.bar",10).ToString(), q.Query.ToString()); - } - } - // ReSharper restore InconsistentNaming -} diff --git a/MongoDB.Linq.Tests/test-results/MongoDB.Linq.Tests.csproj-Debug-2009-10-12.xml b/MongoDB.Linq.Tests/test-results/MongoDB.Linq.Tests.csproj-Debug-2009-10-12.xml deleted file mode 100644 index b17da309..00000000 --- a/MongoDB.Linq.Tests/test-results/MongoDB.Linq.Tests.csproj-Debug-2009-10-12.xml +++ /dev/null @@ -1,2301 +0,0 @@
[2,301 lines of generated NUnit test-result XML removed by this commit; the extraction stripped the markup, leaving only run timestamps (2009-10-12T22:40:45), Success/Failure labels, and assert counts, so the file body is not reproduced here]
\ No newline at end of file diff --git a/MongoDB.Linq/IMongoQuery.cs b/MongoDB.Linq/IMongoQuery.cs deleted file mode 100644 index 3d2b5387..00000000 --- a/MongoDB.Linq/IMongoQuery.cs +++ /dev/null @@ -1,11 +0,0 @@ -using System.Linq; -using MongoDB.Driver; - -namespace MongoDB.Linq { - public interface IMongoQuery : IQueryable { - Document Query { get; } - int Limit { get; } - int Skip { get; } - Document Fields { get; } - } -} diff --git a/MongoDB.Linq/IMongoQueryProvider.cs b/MongoDB.Linq/IMongoQueryProvider.cs deleted file mode 100644 index 2ee68dc9..00000000 --- a/MongoDB.Linq/IMongoQueryProvider.cs +++ /dev/null @@ -1,9 +0,0 @@ -using System.Linq; -using System.Linq.Expressions; - -namespace MongoDB.Linq { - public interface IMongoQueryProvider : IQueryProvider { - MongoQuerySpec GetQuerySpec(Expression expression); - } - -} diff --git a/MongoDB.Linq/MongoDB.Linq.csproj b/MongoDB.Linq/MongoDB.Linq.csproj deleted file mode 100644 index 024a200a..00000000 --- a/MongoDB.Linq/MongoDB.Linq.csproj +++ /dev/null @@ -1,72 +0,0 @@ - - - - Debug - AnyCPU - 9.0.30729 - 2.0 - {2E48891E-72F9-445D-9A5A-DBA787BFFE9E} - Library - Properties - MongoDB.Linq - MongoDB.Linq - v3.5 - 512 - - - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - - - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - - - - - 3.5 - - - 3.5 - - - 3.5 - 
- - - - - - - - - - - - - - - - - {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} - MongoDB.Driver - - - - - \ No newline at end of file diff --git a/MongoDB.Linq/MongoDocumentQuery.cs b/MongoDB.Linq/MongoDocumentQuery.cs deleted file mode 100644 index e50d14ee..00000000 --- a/MongoDB.Linq/MongoDocumentQuery.cs +++ /dev/null @@ -1,96 +0,0 @@ -using System; -using MongoDB.Driver; - -namespace MongoDB.Linq { - - /// - /// This class is a construct for writing strongly typed query expressions for Document fields. - /// It is not meant to be used outside of expressions, since most functions and operators return - /// fake data and are only used to extract parameter information from expressions. - /// - public class MongoDocumentQuery { - private readonly string key; - - public MongoDocumentQuery(Document document, string key) { - this.key = key; - } - - public string Key { get { return key; } } - - public bool In(params T[] values) { - return false; - } - public bool NotIn(params T[] values) { - return false; - } - - public static bool operator ==(MongoDocumentQuery a, string b) { return false; } - public static bool operator !=(MongoDocumentQuery a, string b) { return false; } - public static bool operator ==(string a, MongoDocumentQuery b) { return false; } - public static bool operator !=(string a, MongoDocumentQuery b) { return false; } - - public static bool operator >(MongoDocumentQuery a, int b) { return false; } - public static bool operator >=(MongoDocumentQuery a, int b) { return false; } - public static bool operator <(MongoDocumentQuery a, int b) { return false; } - public static bool operator <=(MongoDocumentQuery a, int b) { return false; } - public static bool operator ==(MongoDocumentQuery a, int b) { return false; } - public static bool operator !=(MongoDocumentQuery a, int b) { return false; } - public static bool operator >(int a, MongoDocumentQuery b) { return false; } - public static bool operator >=(int a, MongoDocumentQuery b) { return false; } - public static bool operator <(int a, MongoDocumentQuery b) { return false; } - public static bool operator <=(int a, MongoDocumentQuery b) { return false; } - public static bool operator ==(int a, MongoDocumentQuery b) { return false; } - public static bool operator !=(int a, MongoDocumentQuery b) { return false; } - - public static bool operator >(MongoDocumentQuery a, double b) { return false; } - public static bool operator >=(MongoDocumentQuery a, double b) { return false; } - public static bool operator <(MongoDocumentQuery a, double b) { return false; } - public static bool operator <=(MongoDocumentQuery a, double b) { return false; } - public static bool operator ==(MongoDocumentQuery a, double b) { return false; } - public static bool operator !=(MongoDocumentQuery a, double b) { return false; } - public static bool operator >(double a, MongoDocumentQuery b) { return false; } - public static bool operator >=(double a, MongoDocumentQuery b) { return false; } - public static bool operator <(double a, MongoDocumentQuery b) { return false; } - public static bool operator <=(double a, MongoDocumentQuery b) { return false; } - public static bool operator ==(double a, MongoDocumentQuery b) { return false; } - public static bool operator !=(double a, MongoDocumentQuery b) { return false; } - - public static bool operator >(MongoDocumentQuery a, DateTime b) { return false; } - public static bool operator >=(MongoDocumentQuery a, DateTime b) { return false; } - public static bool operator <(MongoDocumentQuery a, DateTime b) { return false; 
} - public static bool operator <=(MongoDocumentQuery a, DateTime b) { return false; } - public static bool operator ==(MongoDocumentQuery a, DateTime b) { return false; } - public static bool operator !=(MongoDocumentQuery a, DateTime b) { return false; } - public static bool operator >(DateTime a, MongoDocumentQuery b) { return false; } - public static bool operator >=(DateTime a, MongoDocumentQuery b) { return false; } - public static bool operator <(DateTime a, MongoDocumentQuery b) { return false; } - public static bool operator <=(DateTime a, MongoDocumentQuery b) { return false; } - public static bool operator ==(DateTime a, MongoDocumentQuery b) { return false; } - public static bool operator !=(DateTime a, MongoDocumentQuery b) { return false; } - - public bool Equals(MongoDocumentQuery other) - { - if(ReferenceEquals(null, other)) - return false; - if(ReferenceEquals(this, other)) - return true; - return Equals(other.key, key); - } - - public override bool Equals(object obj) - { - if(ReferenceEquals(null, obj)) - return false; - if(ReferenceEquals(this, obj)) - return true; - if(obj.GetType() != typeof(MongoDocumentQuery)) - return false; - return Equals((MongoDocumentQuery)obj); - } - - public override int GetHashCode() - { - return (key != null ? key.GetHashCode() : 0); - } - } -} diff --git a/MongoDB.Linq/MongoLinqEx.cs b/MongoDB.Linq/MongoLinqEx.cs deleted file mode 100644 index c98b2e67..00000000 --- a/MongoDB.Linq/MongoLinqEx.cs +++ /dev/null @@ -1,17 +0,0 @@ -using System; -using MongoDB.Driver; - -namespace MongoDB.Linq -{ - public static class MongoLinqEx - { - public static IMongoQuery AsQueryable(this T collection) where T : IMongoCollection - { - return new MongoQuery(new MongoQueryProvider(collection)); - } - - public static MongoDocumentQuery Key(this T document,string key) where T: Document{ - return new MongoDocumentQuery(document,key); - } - } -} diff --git a/MongoDB.Linq/MongoQuery.cs b/MongoDB.Linq/MongoQuery.cs deleted file mode 100644 index 67095db1..00000000 --- a/MongoDB.Linq/MongoQuery.cs +++ /dev/null @@ -1,69 +0,0 @@ -using System; -using System.Collections; -using System.Collections.Generic; -using System.Linq; -using System.Linq.Expressions; -using MongoDB.Driver; - -namespace MongoDB.Linq -{ - public class MongoQuery : IMongoQuery - { - private readonly IMongoQueryProvider queryProvider; - private readonly Expression expression; - private MongoQuerySpec querySpec; - - public MongoQuery(IMongoQueryProvider queryProvider) - { - this.queryProvider = queryProvider; - expression = Expression.Constant(this); - } - - public MongoQuery(IMongoQueryProvider queryProvider, Expression expression) - { - this.queryProvider = queryProvider; - this.expression = expression; - } - - public IEnumerator GetEnumerator() - { - return ((IEnumerable)queryProvider.Execute(expression)).GetEnumerator(); - } - - IEnumerator IEnumerable.GetEnumerator() - { - return ((IEnumerable)queryProvider.Execute(expression)).GetEnumerator(); - } - - public Expression Expression - { - get { return expression; } - } - - public Type ElementType - { - get { return typeof(Document); } - } - - public IQueryProvider Provider - { - get { return queryProvider; } - } - - private MongoQuerySpec QuerySpec - { - get - { - if (querySpec == null) - { - querySpec = queryProvider.GetQuerySpec(expression); - } - return querySpec; - } - } - public Document Query { get { return QuerySpec.Query; } } - public int Limit { get { return QuerySpec.Limit; } } - public int Skip { get { return QuerySpec.Skip; } } - 
public Document Fields { get { return QuerySpec.Fields; } } - } -} diff --git a/MongoDB.Linq/MongoQueryProvider.cs b/MongoDB.Linq/MongoQueryProvider.cs deleted file mode 100644 index a22ee016..00000000 --- a/MongoDB.Linq/MongoQueryProvider.cs +++ /dev/null @@ -1,55 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Linq.Expressions; -using System.Text; -using MongoDB.Driver; - -namespace MongoDB.Linq { - public class MongoQueryProvider : IMongoQueryProvider { - - private struct Result { - public IEnumerable Documents; - public bool IsFirstCall; - } - - private readonly IMongoCollection collection; - - public MongoQueryProvider(IMongoCollection collection) { - this.collection = collection; - } - - public IQueryable CreateQuery(Expression expression) { - return new MongoQuery(this, expression); - } - - public IQueryable CreateQuery(Expression expression) { - return (IQueryable)new MongoQuery(this, expression); - } - - public object Execute(Expression expression) { - return ExecuteInternal(expression).Documents; - } - - private Result ExecuteInternal(Expression expression) { - var spec = new MongoQueryTranslator().Translate(expression); - var cur = collection.Find(spec.Query, spec.Limit, spec.Skip, spec.Fields); - return new Result { - Documents = cur.Documents, - IsFirstCall = spec.IsFirstCall - }; - } - - public TResult Execute(Expression expression) { - var result = ExecuteInternal(expression); - if (typeof(TResult).IsAssignableFrom(typeof(Document))) { - return (TResult)(object)((result.IsFirstCall) ? result.Documents.First() : result.Documents.FirstOrDefault()); - } - return (TResult)result.Documents; - } - - public MongoQuerySpec GetQuerySpec(Expression expression) { - return new MongoQueryTranslator().Translate(expression); - } - } -} diff --git a/MongoDB.Linq/MongoQueryTranslator.cs b/MongoDB.Linq/MongoQueryTranslator.cs deleted file mode 100644 index f6765005..00000000 --- a/MongoDB.Linq/MongoQueryTranslator.cs +++ /dev/null @@ -1,309 +0,0 @@ -using System; -using System.Collections; -using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; -using System.Reflection; -using System.Linq.Expressions; -using MongoDB.Driver; - -namespace MongoDB.Linq { - public class MongoQuerySpec { - public readonly Document Query; - public readonly int Limit; - public readonly int Skip; - public readonly Document Fields; - public readonly Document SortOrder; - public readonly bool IsFirstCall; - - public MongoQuerySpec(Document query, int limit, int skip, Document fields, Document sortOrder, bool isFirstCall) { - Query = query; - Limit = limit; - Skip = skip; - Fields = fields; - SortOrder = sortOrder; - IsFirstCall = isFirstCall; - } - } - - public class MongoQueryTranslator : ExpressionVisitor { - - private Document query; - private int limit; - private int skip; - private Document fields; - private Document sortOrder; - private bool isFirstCall = false; - private bool inConditional = false; - private bool foundKey = false; - private readonly Stack valueStack = new Stack(); - private readonly Stack keyStack = new Stack(); - - protected Document Query { - get { - if (query == null) { - query = new Document(); - } - return query; - } - } - - protected Document Fields { - get { - if (fields == null) { - fields = new Document(); - } - return fields; - } - } - - protected Document SortOrder { - get { - if (sortOrder == null) { - sortOrder = new Document(); - } - return sortOrder; - } - } - - public MongoQuerySpec Translate(Expression 
expression) { - Visit(expression); - return new MongoQuerySpec(query, limit, skip, fields, sortOrder, isFirstCall); - } - - private static Expression StripQuotes(Expression e) { - while (e.NodeType == ExpressionType.Quote) { - e = ((UnaryExpression)e).Operand; - } - return e; - } - - protected override Expression VisitMethodCall(MethodCallExpression m) { - Debug.WriteLine(string.Format("Method call: {0}", m.Method.Name)); - if (m.Method.DeclaringType == typeof(Queryable)) { - switch (m.Method.Name) { - case "Where": - Visit(m.Arguments[0]); - var lambda = (LambdaExpression)StripQuotes(m.Arguments[1]); - Visit(lambda.Body); - break; - case "Skip": - Visit(m.Arguments[0]); - Visit(m.Arguments[1]); - skip = (int)valueStack.Pop(); - break; - case "Take": - Visit(m.Arguments[0]); - Visit(m.Arguments[1]); - limit = (int)valueStack.Pop(); - break; - case "First": - isFirstCall = true; - limit = 1; - Visit(m.Arguments[0]); - break; - case "FirstOrDefault": - limit = 1; - Visit(m.Arguments[0]); - break; - case "Select": - break; - default: - throw new NotSupportedException(string.Format("The method '{0}' on queryable type is not supported", m.Method.Name)); - } - - return m; - } - if (m.Object == null && m.Method.Name == "Key") { - Visit(m.Arguments[1]); - keyStack.Push((string)valueStack.Pop()); - foundKey = true; - return m; - } - if (m.Object != null && typeof(MongoDocumentQuery).IsAssignableFrom(m.Object.Type)) { - Debug.WriteLine("call on MongoDocumentQuery"); - Visit(((MethodCallExpression)m.Object).Arguments[1]); - keyStack.Push((string)valueStack.Pop()); - foundKey = true; - switch (m.Method.Name) { - case "In": - case "NotIn": - var argsLambda = Expression.Lambda(m.Arguments[0]); - var argsValue = argsLambda.Compile().DynamicInvoke(); - Query.Add(keyStack.Pop(), new Document().Append((m.Method.Name == "In") ? 
"$in" : "$nin", argsValue)); - break; - case "Equals": - Visit(m.Arguments[0]); - AddEqualityQuery(); - break; - } - return m; - } - if (m.Method.Name == "Equals") { - if (m.Object == null) { - Visit(m.Arguments[0]); - Visit(m.Arguments[1]); - } else { - Visit(m.Object); - Visit(m.Arguments[0]); - } - AddEqualityQuery(); - return m; - } - if (m.Object != null && typeof(Document).IsAssignableFrom(m.Object.Type) && m.Method.Name == "get_Item") { - Debug.WriteLine("Document indexer access, divining query key"); - Visit(m.Arguments[0]); - keyStack.Push((string)valueStack.Pop()); - foundKey = true; - return m; - } - - Debug.WriteLine("unrecognized method call, trying to convert to constant value"); - try { - var methodCallLambda = Expression.Lambda(m); - var methodConstValue = methodCallLambda.Compile().DynamicInvoke(); - valueStack.Push(methodConstValue); - } catch (Exception e) { - throw new NotSupportedException(string.Format("The method '{0}' could not be converted into a constant", m.Method.Name), e); - } - return m; - } - - private void AddEqualityQuery() { - var key = keyStack.Pop(); - var value = valueStack.Pop(); - Query.Append(key, value); - } - - protected override Expression VisitUnary(UnaryExpression u) { - Debug.WriteLine(string.Format("Unary type: {0}", u.NodeType)); - switch (u.NodeType) { - case ExpressionType.Not: - break; - case ExpressionType.Convert: - Visit(StripConvert(u)); - break; - default: - throw new NotSupportedException(string.Format("The unary operator '{0}' is not supported", u.NodeType)); - } return u; - } - - private Expression StripConvert(Expression expression) { - while (expression.NodeType == ExpressionType.Convert) { - expression = ((UnaryExpression)expression).Operand; - } - return expression; - } - - protected override Expression VisitBinary(BinaryExpression b) { - Debug.WriteLine(string.Format("Binary type: {0}", b.NodeType)); - string key; - object value; - switch (b.NodeType) { - case ExpressionType.Equal: - Visit(b.Left); - Visit(b.Right); - AddEqualityQuery(); - break; - case ExpressionType.NotEqual: - case ExpressionType.LessThan: - case ExpressionType.LessThanOrEqual: - case ExpressionType.GreaterThan: - case ExpressionType.GreaterThanOrEqual: - if (inConditional) { - throw new NotSupportedException("cannot handle nested conditionals"); - } - // Note (sdether): because of conditional ordering, left and right visits have to happen inside the - // inConditional = true block, which is why the visits cannot be moved to the top of the switch - inConditional = true; - Visit(b.Left); - bool reverseConditional = false; - if (!foundKey) { - reverseConditional = true; - } - Visit(b.Right); - key = keyStack.Pop(); - value = valueStack.Pop(); - var conditional = "$ne"; - switch (b.NodeType) { - case ExpressionType.LessThan: conditional = reverseConditional ? "$gt" : "$lt"; break; - case ExpressionType.LessThanOrEqual: conditional = reverseConditional ? "$gte" : "$lte"; break; - case ExpressionType.GreaterThan: conditional = reverseConditional ? "$lt" : "$gt"; break; - case ExpressionType.GreaterThanOrEqual: conditional = reverseConditional ? 
"$lte" : "$gte"; break; - } - if(Query.Contains(key)){ - ((Document)Query[key]).Append(conditional,value); - }else{ - Query.Append(key, new Document().Append(conditional, value)); - } - inConditional = false; - foundKey = false; - break; - case ExpressionType.AndAlso: - Visit(b.Left); - Visit(b.Right); - break; - default: - throw new NotSupportedException(string.Format("The binary operator '{0}' is not supported", b.NodeType)); - } - - return b; - } - - protected override Expression VisitConstant(ConstantExpression c) { - Debug.WriteLine(string.Format("constant: ({0}){1}", c.Type, c.Value)); - if (c.Value is MongoQuery) { - Debug.WriteLine("constant of type MongoQuery is our terminal, ignore"); - } else { - switch (Type.GetTypeCode(c.Value.GetType())) { - case TypeCode.Boolean: - case TypeCode.DateTime: - case TypeCode.Decimal: - case TypeCode.Double: - case TypeCode.Int16: - case TypeCode.Int32: - case TypeCode.Int64: - case TypeCode.Single: - case TypeCode.String: - case TypeCode.UInt16: - case TypeCode.UInt32: - case TypeCode.UInt64: - valueStack.Push(c.Value); - break; - default: - throw new NotSupportedException(string.Format("The constant for '{0}' is not supported", c.Value)); - } - } - return c; - } - - protected override Expression VisitMemberAccess(MemberExpression m) { - Debug.WriteLine(string.Format("Member Access: {0}", m.Member.Name)); - if (m.Expression != null) { - if (m.Expression.NodeType == ExpressionType.Parameter) { - return m; - } - if (m.Expression != null && m.Expression.NodeType == ExpressionType.Constant) { - switch (m.Member.MemberType) { - case MemberTypes.Property: - var propertyInfo = (PropertyInfo)m.Member; - var innerMember = (MemberExpression)m.Expression; - var closureFieldInfo = (FieldInfo)innerMember.Member; - var obj = closureFieldInfo.GetValue(((ConstantExpression)innerMember.Expression).Value); - valueStack.Push(propertyInfo.GetValue(obj, null)); - break; - case MemberTypes.Field: - var fieldInfo = (FieldInfo)m.Member; - valueStack.Push(fieldInfo.GetValue(((ConstantExpression)m.Expression).Value)); - break; - default: - Visit(m.Expression); - break; - } - return m; - } - } - throw new NotSupportedException(string.Format("The member '{0}' is not supported", m.Member.Name)); - } - } -} \ No newline at end of file diff --git a/MongoDB.Linq/Properties/AssemblyInfo.cs b/MongoDB.Linq/Properties/AssemblyInfo.cs deleted file mode 100644 index 836f0c87..00000000 --- a/MongoDB.Linq/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,36 +0,0 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("MongoDB.Linq")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("MongoDB.Linq")] -[assembly: AssemblyCopyright("Copyright © 2009")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. If you need to access a type in this assembly from -// COM, set the ComVisible attribute to true on that type. 
-[assembly: ComVisible(false)] - -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("443b51f7-36c7-434b-9d50-2a22a2d9be44")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/MongoDB.Net-Tests/.gitignore b/MongoDB.Net-Tests/.gitignore deleted file mode 100644 index d09da2cd..00000000 --- a/MongoDB.Net-Tests/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -*.dll -*.pdb -*.xml -obj/* -PartCover/* diff --git a/MongoDB.Net-Tests/Bson/TestBsonBinary.cs b/MongoDB.Net-Tests/Bson/TestBsonBinary.cs deleted file mode 100755 index 99db4af2..00000000 --- a/MongoDB.Net-Tests/Bson/TestBsonBinary.cs +++ /dev/null @@ -1,68 +0,0 @@ -using System; -using System.IO; -using NUnit.Framework; - -using MongoDB.Driver; - -namespace MongoDB.Driver.Bson -{ - [TestFixture] - public class TestBsonBinary - { - [Test] - public void TestRoundTrip () - { - Document idoc = new Document (); - idoc.Add ("b", new Binary (new byte[] { (byte)1, (byte)2 })); - - MemoryStream stream = new MemoryStream (); - BsonWriter writer = new BsonWriter (stream); - writer.Write (idoc); - - stream.Seek (0, SeekOrigin.Begin); - BsonReader reader = new BsonReader (stream); - Document odoc = reader.Read (); - - Assert.AreEqual (idoc.ToString (), odoc.ToString ()); - } - - [Test] - public void TestBinaryRead () - { - string hex = "28000000075f6964004b1971811d8b0f00c0000000056461746100070000000203000000e188b400"; - - byte[] data = DecodeHex (hex); - MemoryStream inmem = new MemoryStream (data); - BsonReader inreader = new BsonReader (inmem); - Document indoc = new Document (); - indoc = inreader.Read (); - - MemoryStream outmem = new MemoryStream (); - BsonWriter outwriter = new BsonWriter (outmem); - outwriter.Write (indoc); - byte[] outdata = outmem.ToArray (); - String outhex = BitConverter.ToString (outdata); - outhex = outhex.Replace ("-", ""); - - Assert.AreEqual (hex, outhex.ToLower ()); - - } - - protected static byte[] DecodeHex (string val) - { - int numberChars = val.Length; - - byte[] bytes = new byte[numberChars / 2]; - for (int i = 0; i < numberChars; i += 2) { - try { - bytes[i / 2] = Convert.ToByte (val.Substring (i, 2), 16); - } catch { - //failed to convert these 2 chars, they may contain illegal charracters - bytes[i / 2] = 0; - } - } - return bytes; - } - - } -} diff --git a/MongoDB.Net-Tests/Bson/TestBsonReader.cs b/MongoDB.Net-Tests/Bson/TestBsonReader.cs deleted file mode 100644 index 8f711be8..00000000 --- a/MongoDB.Net-Tests/Bson/TestBsonReader.cs +++ /dev/null @@ -1,287 +0,0 @@ -using System; -using System.IO; -using System.Text; - -using NUnit.Framework; - -namespace MongoDB.Driver.Bson -{ - [TestFixture] - public class TestBsonReader - { - char pound = '\u00a3'; - char euro = '\u20ac'; - - [Test] - public void TestReadString(){ - byte[] buf = HexToBytes("7465737400"); - MemoryStream ms = new MemoryStream(buf); - BsonReader reader = new BsonReader(ms); - - String s = reader.ReadString(); - Assert.AreEqual("test",s); - Assert.AreEqual(4,Encoding.UTF8.GetByteCount(s)); - } - - [Test] - public void TestReadStringLong(){ - StringBuilder sb = new StringBuilder(); - sb.Append('t',256); - string expected = sb.ToString(); - 
Assert.AreEqual(expected, WriteAndReadString(expected)); - } - - [Test] - public void TestReadStringWithUKPound(){ - string expected = "1234£56"; - Assert.AreEqual(expected, WriteAndReadString(expected)); - } - - [Test] - public void TestReadStringBreakDblByteCharOverBuffer(){ - StringBuilder sb = new StringBuilder(); - sb.Append('1',127); - sb.Append(pound); //will break the pound symbol over the buffer boundry. - //sb.Append("1"); - - string expected = sb.ToString(); - Assert.AreEqual(expected, WriteAndReadString(expected)); - } - - [Test] - public void TestReadStringDblByteCharOnEndOfBufferBoundry(){ - StringBuilder sb = new StringBuilder(); - sb.Append(pound, 66); //puts a pound symbol at the end of the buffer boundry but not broken. - - string expected = sb.ToString(); - Assert.AreEqual(expected, WriteAndReadString(expected)); - } - - [Test] - public void TestReadStringTripleByteCharBufferBoundry(){ - StringBuilder sb = new StringBuilder(); - sb.Append("12"); - sb.Append(euro, 66); //will break the euro symbol over the buffer boundry. - - string expected = sb.ToString(); - - Assert.AreEqual(expected, WriteAndReadString(expected)); - } - - private string WriteAndReadString(string val){ - byte[] buf = Encoding.UTF8.GetBytes(val + '\0'); - - MemoryStream ms = new MemoryStream(buf); - BsonReader reader = new BsonReader(ms); - return reader.ReadString(); - } - - [Test] - public void TestReadLenStringValue(){ - string expected = "test"; - - Assert.AreEqual(expected, WriteAndReadLenString(expected)); - } - - [Test] - public void TestReadLenStringShortTripleByte(){ - StringBuilder sb = new StringBuilder(); - //sb.Append('1',127); //first char of euro at the end of the boundry. - //sb.Append(euro, 5); - //sb.Append('1',128); - sb.Append(euro); - - string expected = sb.ToString(); - Assert.AreEqual(expected, WriteAndReadLenString(expected)); - } - - [Test] - public void TestReadLenStringTripleByteCharBufferBoundry0(){ - StringBuilder sb = new StringBuilder(); - sb.Append('1',127); //first char of euro at the end of the boundry. - sb.Append(euro, 5); - sb.Append('1',128); - sb.Append(euro); - - string expected = sb.ToString(); - Assert.AreEqual(expected, WriteAndReadLenString(expected)); - } - - [Test] - public void TestReadLenStringTripleByteCharBufferBoundry1(){ - StringBuilder sb = new StringBuilder(); - sb.Append('1',126); - sb.Append(euro, 5); //middle char of euro at the end of the boundry. - sb.Append('1',128); - sb.Append(euro); - - string expected = sb.ToString(); - Assert.AreEqual(expected, WriteAndReadLenString(expected)); - } - - [Test] - public void TestReadLenStringTripleByteCharOne(){ - StringBuilder sb = new StringBuilder(); - sb.Append(euro, 1); //Just one triple byte char in the string. - - string expected = sb.ToString(); - Assert.AreEqual(expected, WriteAndReadLenString(expected)); - } - - [Test] - public void TestReadLenStringTripleByteCharBufferBoundry2(){ - StringBuilder sb = new StringBuilder(); - sb.Append('1', 125); - sb.Append(euro, 5); //last char of the eruo at the end of the boundry. 
- sb.Append('1',128); - sb.Append(euro); - - string expected = sb.ToString(); - Assert.AreEqual(expected, WriteAndReadLenString(expected)); - } - - [Test] - public void TestReadLenString(){ - string expected = "test"; - Assert.AreEqual(expected, WriteAndReadLenString(expected)); - } - - [Test] - public void TestReadLenStringLong(){ - StringBuilder sb = new StringBuilder(); - sb.Append('t',150); - string expected = sb.ToString(); - Assert.AreEqual(expected, WriteAndReadLenString(expected)); - } - - private string WriteAndReadLenString(string val){ - MemoryStream ms = new MemoryStream(); - BsonWriter bs = new BsonWriter(ms); - BinaryWriter w = new BinaryWriter(ms); - int byteCount = bs.CalculateSize(val,false); - w.Write(byteCount); - bs.WriteString(val); - ms.Seek(0,SeekOrigin.Begin); - BsonReader reader = new BsonReader(ms); - return reader.ReadLenString(); - } - - - [Test] - public void TestReadEmptyDocument(){ - byte[] buf = HexToBytes("0500000000"); - MemoryStream ms = new MemoryStream(buf); - BsonReader reader = new BsonReader(ms); - - Document doc = reader.ReadDocument(); - - Assert.IsNotNull(doc); - } - - [Test] - public void TestReadSimpleDocument(){ - byte[] buf = HexToBytes("1400000002746573740005000000746573740000"); - MemoryStream ms = new MemoryStream(buf); - BsonReader reader = new BsonReader(ms); - - Document doc = reader.Read(); - - Assert.IsNotNull(doc, "Document was null"); - Assert.IsTrue(doc.Contains("test")); - Assert.AreEqual("test", (String)doc["test"]); - } - - [Test] - public void TestReadMultiElementDocument(){ - byte[] buf = HexToBytes("2D000000075F6964004A753AD8FAC16EA58B290351016100000000000000F03F02620005000000746573740000"); - MemoryStream ms = new MemoryStream(buf); - BsonReader reader = new BsonReader(ms); - - Document doc = reader.ReadDocument(); - - Assert.IsNotNull(doc, "Document was null"); - Assert.IsTrue(doc.Contains("_id")); - Assert.IsTrue(doc.Contains("a")); - Assert.IsTrue(doc.Contains("b")); - Assert.AreEqual("\"4a753ad8fac16ea58b290351\"", ((Oid)doc["_id"]).ToString()); - Assert.AreEqual(1, Convert.ToInt32(doc["a"])); - Assert.AreEqual("test", (String)doc["b"]); - } - - [Test] - public void TestReadDocWithDocs(){ -// Document doc = new Document().Append("a", new Document().Append("b", new Document().Append("c",new Document()))); -// Console.WriteLine(ConvertDocToHex(doc)); - byte[] buf = HexToBytes("1D000000036100150000000362000D0000000363000500000000000000"); - MemoryStream ms = new MemoryStream(buf); - BsonReader reader = new BsonReader(ms); - - Document doc = reader.ReadDocument(); - Assert.IsNotNull(doc, "Document was null"); - Assert.AreEqual(buf.Length, reader.Position); - Assert.IsTrue(doc.Contains("a")); - - } - - [Test] - public void TestReadBigDocument(){ - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - - Document expected = new Document(); - expected.Append("str", "test") - .Append("int", 45) - .Append("long", (long)46) - .Append("num", 4.5) - .Append("date",DateTime.Today) - .Append("_id", new OidGenerator().Generate()) - .Append("code", new Code("return 1;")) - .Append("subdoc", new Document().Append("a",1).Append("b",2)) - .Append("array", new String[]{"a","b","c","d"}) - .Append("codewscope", new CodeWScope("return 2;", new Document().Append("c",1))) - .Append("binary", new Binary(new byte[]{0,1,2,3})) - .Append("regex", new MongoRegex("[A-Z]")) - .Append("minkey", MongoMinKey.Value) - .Append("maxkey", MongoMaxKey.Value) - ; - writer.Write(expected); - writer.Flush(); - 
ms.Seek(0,SeekOrigin.Begin); - - BsonReader reader = new BsonReader(ms); - Document doc = reader.Read(); - - Assert.IsNotNull(doc); - } - - private String ConvertDocToHex(Document doc){ - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - - writer.Write(doc); - return BitConverter.ToString(ms.ToArray()).Replace("-",""); - - } - - private byte[] HexToBytes(string hex){ - //TODO externalize somewhere. - if(hex.Length % 2 == 1){ - System.Console.WriteLine("uneven number of hex pairs."); - hex = "0" + hex; - } - int numberChars = hex.Length; - byte[] bytes = new byte[numberChars / 2]; - for (int i = 0; i < numberChars; i += 2){ - try{ - bytes[i / 2] = Convert.ToByte(hex.Substring(i, 2), 16); - } - catch{ - //failed to convert these 2 chars, they may contain illegal charracters - bytes[i / 2] = 0; - } - } - return bytes; - } - - } -} diff --git a/MongoDB.Net-Tests/Bson/TestBsonWriter.cs b/MongoDB.Net-Tests/Bson/TestBsonWriter.cs deleted file mode 100644 index aff7daac..00000000 --- a/MongoDB.Net-Tests/Bson/TestBsonWriter.cs +++ /dev/null @@ -1,151 +0,0 @@ -using System; -using System.IO; -using System.Text; - -using NUnit.Framework; - -using MongoDB.Driver; - -namespace MongoDB.Driver.Bson -{ - [TestFixture] - public class TestBsonWriter - { - char euro = '\u20ac'; - [Test] - public void TestCalculateSizeOfEmptyDoc(){ - Document doc = new Document(); - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - - Assert.AreEqual(5,writer.CalculateSize(doc)); - } - - [Test] - public void TestCalculateSizeOfSimpleDoc(){ - Document doc = new Document(); - doc.Append("a","a"); - doc.Append("b",1); - - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - //BsonDocument bdoc = BsonConvert.From(doc); - - Assert.AreEqual(21,writer.CalculateSize(doc)); - } - - [Test] - public void TestCalculateSizeOfComplexDoc(){ - Document doc = new Document(); - doc.Append("a","a"); - doc.Append("b",1); - Document sub = new Document().Append("c_1",1).Append("c_2",DateTime.Now); - doc.Append("c",sub); - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - - Assert.AreEqual(51,writer.CalculateSize(doc)); - } - - [Test] - public void TestWriteString(){ - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - string expected = "54-65-73-74-73-2E-69-6E-73-65-72-74-73-00"; - writer.WriteString("Tests.inserts"); - - string hexdump = BitConverter.ToString(ms.ToArray()); - - Assert.AreEqual(expected, hexdump); - } - - [Test] - public void TestWriteMultibyteString(){ - - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - - string val = new StringBuilder().Append(euro,3).ToString(); - string expected = BitConverter.ToString(Encoding.UTF8.GetBytes(val + '\0')); - Assert.AreEqual(expected,WriteStringAndGetHex(val)); - } - - [Test] - public void TestWriteMultibyteStringLong(){ - - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - - string val = new StringBuilder().Append("ww").Append(euro,180).ToString(); - string expected = BitConverter.ToString(Encoding.UTF8.GetBytes(val + '\0')); - Assert.AreEqual(expected,WriteStringAndGetHex(val)); - } - - private string WriteStringAndGetHex(string val){ - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - writer.WriteString(val); - return BitConverter.ToString(ms.ToArray()); - } - - [Test] - public void TestWriteDocument(){ - MemoryStream ms = new MemoryStream(); - 
BsonWriter writer = new BsonWriter(ms); - string expected = "1400000002746573740005000000746573740000"; - Document doc = new Document().Append("test", "test"); - - writer.Write(doc); - - string hexdump = BitConverter.ToString(ms.ToArray()); - hexdump = hexdump.Replace("-",""); - - Assert.AreEqual(expected, hexdump); - } - - [Test] - public void TestWriteArrayDoc(){ - String expected = "2000000002300002000000610002310002000000620002320002000000630000"; - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - - String[] str = new String[]{"a","b","c"}; - writer.WriteValue(BsonDataType.Array,str); - - string hexdump = BitConverter.ToString(ms.ToArray()); - hexdump = hexdump.Replace("-",""); - Assert.AreEqual(expected, hexdump); - } - - [Test] - public void TestNullsDontThrowExceptions(){ - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - Document doc = new Document().Append("n", null); - try{ - writer.Write(doc); - }catch(NullReferenceException){ - Assert.Fail("Null Reference Exception was thrown on trying to serialize a null value"); - } - } - - [Test] - public void TestWritingTooLargeDocument(){ - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); - Binary b = new Binary(new byte[BsonInfo.MaxDocumentSize]); - Document big = new Document().Append("x", b); - bool thrown = false; - try{ - writer.Write(big); - }catch(ArgumentException){ - thrown = true; - }catch(Exception e){ - Assert.Fail("Wrong Exception thrown " + e.GetType().Name); - } - - Assert.IsTrue(thrown, "Shouldn't be able to write large document"); - } - - } -} diff --git a/MongoDB.Net-Tests/Configuration/TestConfigurationSection.cs b/MongoDB.Net-Tests/Configuration/TestConfigurationSection.cs deleted file mode 100644 index 39ed3834..00000000 --- a/MongoDB.Net-Tests/Configuration/TestConfigurationSection.cs +++ /dev/null @@ -1,25 +0,0 @@ -using System; -using System.Configuration; - -using NUnit.Framework; - -using MongoDB.Driver; - -namespace MongoDB.Driver.Configuration -{ - [TestFixture] - public class TestConfigurationSection - { - [Test] - public void TestReadNamed(){ - MongoConfiguration config = (MongoConfiguration)ConfigurationManager.GetSection("Mongo"); - Assert.AreEqual("Server=localhost:27018", config.Connections["local21018"].ConnectionString); - } - - [Test] - public void TestReadDefaults(){ - MongoConfiguration config = (MongoConfiguration)ConfigurationManager.GetSection("Mongo"); - Assert.AreEqual("Server=localhost:27017", config.Connections["defaults"].ConnectionString); - } - } -} diff --git a/MongoDB.Net-Tests/Connections/TestConnection.cs b/MongoDB.Net-Tests/Connections/TestConnection.cs deleted file mode 100644 index b61c03fa..00000000 --- a/MongoDB.Net-Tests/Connections/TestConnection.cs +++ /dev/null @@ -1,67 +0,0 @@ -using System; -using System.IO; -using MongoDB.Driver.Protocol; -using NUnit.Framework; -using MongoDB.Driver.IO; - -namespace MongoDB.Driver.Connections -{ - [TestFixture()] - public class TestConnection - { - [Test] - public void TestSendQueryMessage(){ - //Connection conn = new Connection("10.141.153.2"); - Connections.Connection conn = ConnectionFactory.GetConnection(string.Empty); - conn.Open(); - - QueryMessage qmsg = generateQueryMessage(); - conn.SendTwoWayMessage(qmsg); - - conn.Close(); - } - - [Test] - public void TestReconnectOnce(){ - Connections.Connection conn = ConnectionFactory.GetConnection(string.Empty); - conn.Open(); - - WriteBadMessage(conn); - try{ - QueryMessage qmsg = 
generateQueryMessage(); - conn.SendTwoWayMessage(qmsg); - - }catch(IOException){ - //Should be able to resend. - Assert.IsTrue(conn.State == ConnectionState.Opened); - QueryMessage qmsg = generateQueryMessage(); - ReplyMessage rmsg = conn.SendTwoWayMessage(qmsg); - Assert.IsNotNull(rmsg); - - } - } - - protected void WriteBadMessage(Connections.Connection conn){ - //Write a bad message to the socket to force mongo to shut down our connection. - BinaryWriter writer = new BinaryWriter(conn.GetStream()); - System.Text.UTF8Encoding encoding=new System.Text.UTF8Encoding(); - Byte[] msg = encoding.GetBytes("Goodbye MongoDB!"); - writer.Write(16 + msg.Length + 1); - writer.Write(1); - writer.Write(1); - writer.Write(1001); - writer.Write(msg); - writer.Write((byte)0); - } - - protected QueryMessage generateQueryMessage(){ - Document qdoc = new Document(); - qdoc.Add("listDatabases", 1.0); - //QueryMessage qmsg = new QueryMessage(qdoc,"system.namespaces"); - QueryMessage qmsg = new QueryMessage(qdoc,"admin.$cmd"); - qmsg.NumberToReturn = -1; - - return qmsg; - } - } -} \ No newline at end of file diff --git a/MongoDB.Net-Tests/Connections/TestConnectionFactory.cs b/MongoDB.Net-Tests/Connections/TestConnectionFactory.cs deleted file mode 100644 index ed581018..00000000 --- a/MongoDB.Net-Tests/Connections/TestConnectionFactory.cs +++ /dev/null @@ -1,44 +0,0 @@ -using System; - -using NUnit.Framework; - -namespace MongoDB.Driver.Connections -{ - [TestFixture] - public class TestConnectionFactory - { - [TearDown] - public void TearDown (){ - ConnectionFactory.Shutdown (); - } - - [Test] - public void TestGetConnection (){ - var connection1 = ConnectionFactory.GetConnection (string.Empty); - var connection2 = ConnectionFactory.GetConnection (string.Empty); - Assert.IsNotNull (connection1); - Assert.IsNotNull (connection2); - Assert.AreEqual (1, ConnectionFactory.PoolCount); - } - - [Test] - public void TestCreatePoolForEachUniqeConnectionString (){ - ConnectionFactory.GetConnection (string.Empty); - ConnectionFactory.GetConnection (string.Empty); - ConnectionFactory.GetConnection ("Username=test"); - ConnectionFactory.GetConnection ("Username=test"); - ConnectionFactory.GetConnection ("Server=localhost"); - Assert.AreEqual (3, ConnectionFactory.PoolCount); - } - - [Test] - public void TestExceptionWhenMinimumPoolSizeIsGreaterThenMaximumPoolSize (){ - try{ - ConnectionFactory.GetConnection("MinimumPoolSize=50; MaximumPoolSize=10"); - }catch(ArgumentException){ - }catch(Exception){ - Assert.Fail("Wrong exception thrown"); - } - } - } -} diff --git a/MongoDB.Net-Tests/IO/TestQueryMessage.cs b/MongoDB.Net-Tests/IO/TestQueryMessage.cs deleted file mode 100755 index 48c26123..00000000 --- a/MongoDB.Net-Tests/IO/TestQueryMessage.cs +++ /dev/null @@ -1,56 +0,0 @@ -using System; -using System.IO; -using MongoDB.Driver.Protocol; -using NUnit.Framework; - -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver.IO -{ - [TestFixture] - public class TestQueryMessage - { - [Test] - public void TestAllBytesWritten() - { - Document query = new Document(); - query.Add("col1", 1); - - QueryMessage msg = new QueryMessage(query,"TestDB.TestCol"); - MemoryStream buffer = new MemoryStream(); - msg.Write(buffer); - - Byte[] output = buffer.ToArray(); - String hexdump = BitConverter.ToString(output); - //Console.WriteLine("Dump: " + hexdump); - - Assert.IsTrue(output.Length > 0); - 
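// The expected dump below is a 58-byte OP_QUERY message: 3A000000 length, zeroed requestID and
// responseTo, D4070000 = opCode 2004 (OP_QUERY), 00000000 flags, "TestDB.TestCol\0" as the full
// collection name, numberToSkip 0, numberToReturn 0, then the 15-byte BSON document {"col1": 1}.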
Assert.AreEqual("3A-00-00-00-00-00-00-00-00-00-00-00-D4-07-00-00-00-00-00-00-54-65-73-74-44-42-2E-54-65-73-74-43-6F-6C-00-00-00-00-00-00-00-00-00-0F-00-00-00-10-63-6F-6C-31-00-01-00-00-00-00", hexdump); - - } - - [Test] - public void TestWriteMessageTwice(){ - string expectedHex = "3A-00-00-00-00-00-00-00-00-00-00-00-D4-07-00-00-00-00-00-00-54-65-73-74-44-42-2E-54-65-73-74-43-6F-6C-00-00-00-00-00-00-00-00-00-0F-00-00-00-10-63-6F-6C-31-00-01-00-00-00-00"; - Document query = new Document(); - query.Add("col1", 1); - - QueryMessage msg = new QueryMessage(query,"TestDB.TestCol"); - MemoryStream buffer = new MemoryStream(); - msg.Write(buffer); - - Byte[] output = buffer.ToArray(); - String hexdump = BitConverter.ToString(output); - - MemoryStream buffer2 = new MemoryStream(); - msg.Write(buffer2); - - Byte[] output2 = buffer.ToArray(); - String hexdump2 = BitConverter.ToString(output2); - - Assert.AreEqual(expectedHex,hexdump); - Assert.AreEqual(hexdump,hexdump2); - - } - } -} \ No newline at end of file diff --git a/MongoDB.Net-Tests/MongoDB.Driver.Tests.csproj b/MongoDB.Net-Tests/MongoDB.Driver.Tests.csproj deleted file mode 100644 index 57e60b3d..00000000 --- a/MongoDB.Net-Tests/MongoDB.Driver.Tests.csproj +++ /dev/null @@ -1,150 +0,0 @@ - - - - Debug - AnyCPU - 9.0.30729 - 2.0 - {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02} - Library - MongoDB.Driver.Tests - - - 2.0 - - - publish\ - true - Disk - false - Foreground - 7 - Days - false - false - true - 0 - 1.0.0.%2a - false - false - true - v3.5 - MongoDB.Driver.Tests - C:\Documents and Settings\scorder\Application Data\ICSharpCode/SharpDevelop3.0\Settings.SourceAnalysis - False - False - false - - - true - full - false - bin\Debug - DEBUG - prompt - 4 - - - none - false - bin\Release - prompt - 4 - - - False - - - False - Auto - 4194304 - AnyCPU - 4096 - - - - - - 3.5 - - - - - False - ..\redist\nunit.framework.dll - - - - - - - - - - - - - - - - - - - - - - - - Always - - - - - - - - - - - - - - - - - - - - - False - - - False - - - False - - - False - - - False - - - - - {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} - MongoDB.Driver - - - - - - Always - - - - - - - \ No newline at end of file diff --git a/MongoDB.Net-Tests/MongoDB.Driver.Tests.dll.config b/MongoDB.Net-Tests/MongoDB.Driver.Tests.dll.config deleted file mode 100644 index 26ef9e06..00000000 --- a/MongoDB.Net-Tests/MongoDB.Driver.Tests.dll.config +++ /dev/null @@ -1,18 +0,0 @@ - - - -
- - - - - - - - - - - - - - \ No newline at end of file diff --git a/MongoDB.Net-Tests/TestAuthentication.cs b/MongoDB.Net-Tests/TestAuthentication.cs deleted file mode 100644 index b9c56880..00000000 --- a/MongoDB.Net-Tests/TestAuthentication.cs +++ /dev/null @@ -1,119 +0,0 @@ -/* - * User: Sedward - */ -using System; -using MongoDB.Driver.Connections; -using NUnit.Framework; - -namespace MongoDB.Driver -{ - [TestFixture(Description = "Requires start server with --auth")] - public class TestAuthentication - { - private const int AuthServerPort = MongoServerEndPoint.DefaultPort + 3; - const String testDatabaseName = "testAuth"; - const String testuser = "testuser"; - const String testpass = "test1234"; - - const String adminuser = "adminuser"; - const String adminpass = "admin1234"; - - [TestFixtureSetUp] - public void SetUp() - { - using(var mongo = new Mongo(CreateConnectionStringBuilder().ToString())) - { - mongo.Connect(); - - var testDatabase = mongo[testDatabaseName]; - if(testDatabase.MetaData.FindUser(testuser) == null) - testDatabase.MetaData.AddUser(testuser, testpass); - - var adminDatabase = mongo["admin"]; - if(adminDatabase.MetaData.FindUser(adminuser) == null) - adminDatabase.MetaData.AddUser(adminuser, adminpass); - } - } - - [Test] - public void TestLoginGoodPassword() - { - using(var mongo = ConnectAndAuthenticatedMongo(testuser, testpass)) - TryInsertData(mongo); - } - - [Test] - [ExpectedException(typeof(MongoException))] - public void TestLoginBadPassword() - { - using(var mongo = ConnectAndAuthenticatedMongo(testuser, "badpassword")) - TryInsertData(mongo); - } - - [Test] - public void TestAuthenticatedInsert(){ - using(var mongo = ConnectAndAuthenticatedMongo(testuser, testpass)) - TryInsertData(mongo); - } - - [Test] - [ExpectedException(typeof(MongoOperationException))] - public void TestUnauthenticatedInsert(){ - using(var mongo = new Mongo(CreateConnectionStringBuilder().ToString())) - { - mongo.Connect(); - - TryInsertData(mongo); - } - } - - private static Mongo ConnectAndAuthenticatedMongo(string username,string password) - { - var builder = CreateConnectionStringBuilder(); - builder.Username = username; - builder.Password = password; - var mongo = new Mongo(builder.ToString()); - mongo.Connect(); - return mongo; - } - - private static MongoConnectionStringBuilder CreateConnectionStringBuilder() - { - var builder = new MongoConnectionStringBuilder(); - builder.AddServer("localhost", AuthServerPort); - return builder; - } - - private static void TryInsertData(Mongo mongo) - { - var collection = mongo[testDatabaseName]["testCollection"]; - collection.Delete(new Document(),true); - collection.Insert(new Document().Append("value", 84),true); - - var value = collection.FindOne(new Document().Append("value", 84)); - - Assert.AreEqual(84, value["value"]); - } - - [TestFixtureTearDown] - public void TestTearDown(){ - /* - * In case clean up fails open a Mongo shell and execute the following commands - * use admin - * db.auth("adminuser", "admin1234"); - * db.system.users.find(); //should see adminuser - * db.system.users.remove({user:"adminuser"}); - * db.system.users.find(); //should not see adminuser or any other. - * Tests should now run. 
- */ - using(var mongo = ConnectAndAuthenticatedMongo(adminuser, adminuser)) - { - mongo[testDatabaseName].MetaData.RemoveUser(testuser); - mongo["admin"].MetaData.RemoveUser(adminuser); - } - - // clean connections - ConnectionFactory.Shutdown(); - } - } -} \ No newline at end of file diff --git a/MongoDB.Net-Tests/TestB.cs b/MongoDB.Net-Tests/TestB.cs deleted file mode 100644 index 49354316..00000000 --- a/MongoDB.Net-Tests/TestB.cs +++ /dev/null @@ -1,42 +0,0 @@ - -using System; -using System.IO; -using System.Net; -using System.Net.Sockets; - - -using NUnit.Framework; - -namespace MongoDB.Driver -{ - - - [TestFixture()] - public class TestB - { - - [Test()] - public void TestCase(){ - TcpClient client = new TcpClient(); - client.Connect("localhost", 27017); - BufferedStream buff = new BufferedStream(client.GetStream()); - BinaryWriter writer = new BinaryWriter(buff); - - System.Text.UTF8Encoding encoding=new System.Text.UTF8Encoding(); - Byte[] msg = encoding.GetBytes("Hello MongoDB!"); - - - writer.Write(16 + msg.Length + 1); - writer.Write(1); - writer.Write(1); - writer.Write(1000); - writer.Write(msg); - writer.Write((byte)0); - - writer.Flush(); - writer.Close(); - client.Close(); - - } - } -} diff --git a/MongoDB.Net-Tests/TestBinary.cs b/MongoDB.Net-Tests/TestBinary.cs deleted file mode 100644 index dd94a4fc..00000000 --- a/MongoDB.Net-Tests/TestBinary.cs +++ /dev/null @@ -1,16 +0,0 @@ - -using System; -using NUnit.Framework; - -namespace MongoDB.Driver -{ - [TestFixture()] - public class TestBinary - { - - [Test] - public void TestToString(){ - Binary b = new Binary(); - } - } -} diff --git a/MongoDB.Net-Tests/TestCollection.cs b/MongoDB.Net-Tests/TestCollection.cs deleted file mode 100755 index 2cf8849b..00000000 --- a/MongoDB.Net-Tests/TestCollection.cs +++ /dev/null @@ -1,398 +0,0 @@ -using System; -using System.Collections.Generic; - -using NUnit.Framework; -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver -{ - [TestFixture] - public class TestCollection : MongoTestBase - { - private string pound = "\u00a3"; - - public override string TestCollections { - get { - return "inserts,updates,counts,counts_spec,finds,charreads"; - } - } - - public override void OnInit (){ - IMongoCollection finds = DB["finds"]; - for(int j = 1; j < 100; j++){ - finds.Insert(new Document(){{"x", 4},{"h", "hi"},{"j", j}}); - } - for(int j = 100; j < 105; j++){ - finds.Insert(new Document(){{"x", 4},{"n", 1},{"j", j}}); - } - IMongoCollection charreads = DB["charreads"]; - charreads.Insert(new Document(){{"test", "1234" + pound + "56"}}); - - } - - - [Test] - public void TestFindOne(){ - Document query = new Document(); - query["j"] = 10; - Document result = DB["finds"].FindOne(query); - Assert.IsNotNull(result); - Assert.AreEqual(4, result["x"]); - Assert.AreEqual(10, result["j"]); - - } - - [Test] - public void TestFindOneNotThere(){ - Document query = new Document(); - query["not_there"] = 10; - Document result = DB["finds"].FindOne(query); - Assert.IsNull(result); - } - - [Test] - public void TestFindNulls(){ - Document query = new Document().Append("n",DBNull.Value); - long numnulls = DB["finds"].Count(query); - Assert.AreEqual(99,numnulls); - } - - [Test] - public void TestFindAttributeLimit(){ - Document query = new Document(); - query["j"] = 10; - Document fields = new Document(); - fields["x"] = 1; - - ICursor c = DB["finds"].Find(query,-1,0,fields); - foreach(Document result in c.Documents){ - Assert.IsNotNull(result); - Assert.AreEqual(4, result["x"]); - Assert.IsNull(result["j"]); - 
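// j should come back null because the field selector {x: 1} projects only x (plus _id).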
} - } - - [Test] - public void TestFindGTRange(){ - Document query = new Document(); - query["j"] = new Document().Append("$gt",20); - - ICursor c = DB["finds"].Find(query); - foreach(Document result in c.Documents){ - Assert.IsNotNull(result); - Object j = result["j"]; - Assert.IsTrue(Convert.ToDouble(j) > 20); - } - } - - [Test] - public void TestManualWhere(){ - Document query = new Document().Append("$where", new Code("this.j % 2 == 0")); - ICursor c = DB["finds"].Find(query); - foreach(Document result in c.Documents){ - Assert.IsNotNull(result); - Object j = result["j"]; - Assert.IsTrue(Convert.ToInt32(j) % 2 == 0); - } - } - [Test] - public void TestFindWhereEquivalency(){ - IMongoCollection col = DB["finds"]; - Document lt = new Document().Append("j", new Document().Append("$lt", 5)); - string where = "this.j < 5"; - Document explicitWhere = new Document().Append("$where", new Code(where)); - CodeWScope func = new CodeWScope("function() { return this.j < 5; }", new Document()); - Document funcDoc = new Document().Append("$where", func); - - Assert.AreEqual(4, CountDocs(col.Find(lt)), "Basic find didn't return 4 docs"); - Assert.AreEqual(4, CountDocs(col.Find(where)), "String where didn't return 4 docs"); - Assert.AreEqual(4, CountDocs(col.Find(explicitWhere)), "Explicit where didn't return 4 docs"); - Assert.AreEqual(4, CountDocs(col.Find(funcDoc)), "Function where didn't return 4 docs"); - } - - private int CountDocs(ICursor cur){ - int cnt = 0; - foreach(Document doc in cur.Documents){ - cnt++; - } - return cnt; - } - [Test] - public void TestWhere(){ - ICursor c = DB["finds"].Find("this.j % 2 == 0"); - foreach(Document result in c.Documents){ - Assert.IsNotNull(result); - Object j = result["j"]; - Assert.IsTrue(Convert.ToInt32(j) % 2 == 0); - } - } - - [Test] - public void TestFindOneObjectContainingUKPound(){ - Document query = new Document(); - Document result = DB["charreads"].FindOne(query); - Assert.IsNotNull(result); - Assert.IsTrue(result.Contains("test")); - Assert.AreEqual("1234£56",result["test"]); - } - - [Test] - public void TestSimpleInsert(){ - IMongoCollection inserts = DB["inserts"]; - Document indoc = new Document(); - indoc["song"] = "Palmdale"; - indoc["artist"] = "Afroman"; - indoc["year"] = 1999; - - inserts.Insert(indoc); - - Document result = inserts.FindOne(new Document().Append("song","Palmdale")); - Assert.IsNotNull(result); - Assert.AreEqual(1999,result["year"]); - } - - [Test] - public void TestReallySimpleInsert(){ - IMongoCollection inserts = DB["inserts"]; - Document indoc = new Document(); - indoc["y"] = 1; - indoc["x"] = 2; - inserts.Insert(indoc); - - Document result = inserts.FindOne(new Document().Append("x",2)); - Assert.IsNotNull(result); - Assert.AreEqual(1,result["y"]); - } - - [Test] - public void TestPoundSymbolInsert(){ - IMongoCollection inserts = DB["inserts"]; - Document indoc = new Document().Append("x","1234" + pound + "56").Append("y",1);; - inserts.Insert(indoc); - - Document result = inserts.FindOne(new Document().Append("x","1234" + pound + "56")); - Assert.IsNotNull(result); - Assert.AreEqual(1,result["y"]); - } - - [Test] - public void TestArrayInsert(){ - IMongoCollection inserts = DB["inserts"]; - Document indoc1 = new Document(); - indoc1["song"] = "The Axe"; - indoc1["artist"] = "Tinsley Ellis"; - indoc1["year"] = 2006; - - Document indoc2 = new Document(); - indoc2["song"] = "The Axe2"; - indoc2["artist"] = "Tinsley Ellis2"; - indoc2["year"] = 2008; - - inserts.Insert(new Document[]{indoc1,indoc2}); - - Document result = 
inserts.FindOne(new Document().Append("song","The Axe")); - Assert.IsNotNull(result); - Assert.AreEqual(2006,result["year"]); - - result = inserts.FindOne(new Document().Append("song","The Axe2")); - Assert.IsNotNull(result); - Assert.AreEqual(2008,result["year"]); - } - - [Test] - public void TestInsertOfArray(){ - OidGenerator ogen = new OidGenerator(); - IMongoCollection inserts = DB["inserts"]; - Document album = new Document(); - album["_id"] = ogen.Generate(); - album["artist"] = "Popa Chubby"; - album["title"] = "Deliveries After Dark"; - album["songs"] = new[] { - new Document().Append("title", "Let The Music Set You Free").Append("length", "5:15").Append("_id", ogen.Generate()), - new Document().Append("title", "Sally Likes to Run").Append("length", "4:06").Append("_id", ogen.Generate()), - new Document().Append("title", "Deliveries After Dark").Append("length", "4:17").Append("_id", ogen.Generate()), - new Document().Append("title", "Theme From The Godfather").Append("length", "3:06").Append("_id", ogen.Generate()), - new Document().Append("title", "Grown Man Crying Blues").Append("length", "8:09").Append("_id", ogen.Generate()), - }; - inserts.Insert(album); - - Document result = inserts.FindOne(new Document().Append("songs.title","Deliveries After Dark")); - Assert.IsNotNull(result); - - Assert.AreEqual(album.ToString(), result.ToString()); - } - - [Test] - public void TestInsertLargerThan4MBDocument(){ - Binary b = new Binary(new byte[1024 * 1024]); - Document big = new Document(){{"name", "Big Document"}, {"b1", b}, {"b2", b}, {"b3", b}, {"b4", b}}; - IMongoCollection inserts = DB["inserts"]; - bool thrown = false; - try{ - inserts.Insert(big); - }catch(MongoException){ - thrown = true; - }catch(Exception e){ - Assert.Fail("Wrong Exception thrown " + e.GetType().Name); - } - Assert.IsTrue(thrown, "Shouldn't be able to insert large document"); - } - - [Test] - public void TestInsertBulkLargerThan4MBOfDocuments(){ - Binary b = new Binary(new byte[1024 * 1024 * 2]); - IMongoCollection inserts = DB["inserts"]; - try{ - Document[] docs = new Document[10]; - //6MB+ of documents - for(int x = 0; x < docs.Length; x++){ - docs[x] = new Document(){{"name", "bulk"}, {"b", b}, {"x", x}}; - } - inserts.Insert(docs,true); - long count = inserts.Count(new Document(){{"name", "bulk"}}); - Assert.AreEqual(docs.Length, count, "Wrong number of documents inserted"); - }catch(MongoException){ - Assert.Fail("MongoException should not have been thrown."); - } - } - - [Test] - public void TestDelete(){ - IMongoCollection deletes = DB["deletes"]; - Document doc = new Document(); - doc["y"] = 1; - doc["x"] = 2; - deletes.Insert(doc); - - Document selector = new Document().Append("x",2); - - Document result = deletes.FindOne(selector); - Assert.IsNotNull(result); - Assert.AreEqual(1,result["y"]); - - deletes.Delete(selector); - result = deletes.FindOne(selector); - Assert.IsNull(result,"Shouldn't have been able to find a document that was deleted"); - - } - - [Test] - public void TestUpdateUpsertNotExisting(){ - IMongoCollection updates = DB["updates"]; - Document doc = new Document(); - doc["First"] = "Sam"; - doc["Last"] = "CorderNE"; - - updates.Update(doc); - Document selector = new Document().Append("Last", "CorderNE"); - Document result = updates.FindOne(selector); - Assert.IsNotNull(result); - Assert.AreEqual("Sam", result["First"]); - } - - [Test] - public void TestUpdateUpsertExisting(){ - IMongoCollection updates = DB["updates"]; - Document doc = new Document(); - doc["First"] = "Mtt"; - 
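// "Mtt" is deliberately misspelled; the test corrects it to "Matt" below and verifies the update.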
doc["Last"] = "Brewer"; - - updates.Insert(doc); - - Document selector = new Document().Append("Last", "Brewer"); - doc = updates.FindOne(selector); - Assert.IsNotNull(doc); - Assert.AreEqual("Mtt", doc["First"]); - Assert.IsNotNull(doc["_id"]); - - doc["First"] = "Matt"; - updates.Update(doc); - - Document result = updates.FindOne(selector); - Assert.IsNotNull(result); - Assert.AreEqual("Matt", result["First"]); - - } - - [Test] - public void TestUpdateMany(){ - IMongoCollection updates = DB["updates"]; - - updates.Insert(new Document().Append("Last", "Cordr").Append("First","Sam")); - updates.Insert(new Document().Append("Last", "Cordr").Append("First","Sam2")); - updates.Insert(new Document().Append("Last", "Cordr").Append("First","Sam3")); - - Document selector = new Document().Append("Last", "Cordr"); - ICursor results = updates.Find(selector); - bool found = false; - foreach(Document doc in results.Documents){ - Assert.AreEqual("Cordr", doc["Last"]); - found = true; - } - Assert.IsTrue(found,"Should have found docs inserted for TestUpdateMany"); - Assert.AreEqual(3, updates.Count(selector), "Didn't find all Documents inserted for TestUpdateMany with Selector"); - - //Document updateData = new Document().Append("$set", new Document().Append("Last", "Corder2")); - Document updateData = new Document().Append("Last", "Corder2"); - updates.UpdateAll(updateData, selector); - - selector["Last"] = "Corder2"; - Assert.AreEqual(3, updates.Count(selector), "Not all Cordr documents were updated"); - - results = updates.Find(selector); - found = false; - foreach(Document doc in results.Documents){ - Assert.AreEqual("Corder2", doc["Last"]); - Assert.IsNotNull(doc["First"],"First name should not disappear"); - found = true; - } - Assert.IsTrue(found,"Should have found docs updated for TestMany"); - } - - [Test] - public void TestUpdatePartial(){ - IMongoCollection updates = DB["updates"]; - int coolness = 5; - Document einstein = new Document(){{"Last", "Einstien"},{"First", "Albert"},{"Coolness",coolness++}}; - updates.Insert(einstein); - Document selector = new Document(){{"_id", einstein["_id"]}}; - - updates.Update(new Document(){{"$inc", new Document(){{"Coolness", 1}}}}, selector); - Assert.AreEqual(coolness++, Convert.ToInt32(updates.FindOne(selector)["Coolness"]), "Coolness field not incremented", true); - - updates.Update(new Document(){{"$set",new Document(){{"Last", "Einstein"}}}, - {"$inc",new Document(){{"Coolness",1}}}},selector,true); - Assert.AreEqual(coolness++, Convert.ToInt32(updates.FindOne(selector)["Coolness"]), "Coolness field not incremented"); - } - - [Test] - public void TestCount(){ - IMongoCollection counts = DB["counts"]; - int top = 100; - for(int i = 0; i < top; i++){ - counts.Insert(new Document().Append("Last", "Cordr").Append("First","Sam").Append("cnt", i)); - } - long cnt = counts.Count(); - Assert.AreEqual(top,cnt, "Count not the same as number of inserted records"); - } - - [Test] - public void TestCountWithSpec(){ - IMongoCollection counts = DB["counts_spec"]; - counts.Insert(new Document().Append("Last", "Cordr").Append("First","Sam").Append("cnt", 1)); - counts.Insert(new Document().Append("Last", "Cordr").Append("First","Sam").Append("cnt", 2)); - counts.Insert(new Document().Append("Last", "Corder").Append("First","Sam").Append("cnt", 3)); - - Assert.AreEqual(2, counts.Count(new Document().Append("Last", "Cordr"))); - Assert.AreEqual(1, counts.Count(new Document().Append("Last", "Corder"))); - Assert.AreEqual(0, counts.Count(new Document().Append("Last", 
"Brown"))); - - } - - [Test] - public void TestCountInvalidCollection(){ - IMongoCollection counts = DB["counts_wtf"]; - Assert.AreEqual(0, counts.Count()); - } - } -} \ No newline at end of file diff --git a/MongoDB.Net-Tests/TestConnectionStringBuilder.cs b/MongoDB.Net-Tests/TestConnectionStringBuilder.cs deleted file mode 100644 index 44acdda7..00000000 --- a/MongoDB.Net-Tests/TestConnectionStringBuilder.cs +++ /dev/null @@ -1,85 +0,0 @@ -using System; -using System.Collections.Generic; -using NUnit.Framework; - -namespace MongoDB.Driver -{ - [TestFixture] - public class TestConnectionStringBuilder - { - [Test] - public void TestCreateEmptyInstance (){ - new MongoConnectionStringBuilder (); - } - - [Test] - public void TestDefaults (){ - var builder = new MongoConnectionStringBuilder (); - Assert.IsNull (builder.Username); - Assert.IsNull (builder.Password); - Assert.AreEqual (builder.MaximumPoolSize, MongoConnectionStringBuilder.DefaultMaximumPoolSize); - Assert.AreEqual (builder.MinimumPoolSize, MongoConnectionStringBuilder.DefaultMinimumPoolSize); - Assert.AreEqual (builder.ConnectionLifetime, MongoConnectionStringBuilder.DefaultConnectionLifeTime); - Assert.AreEqual (builder.ConnectionTimeout, MongoConnectionStringBuilder.DefaultConnectionTimeout); - Assert.AreEqual (builder.Pooled, MongoConnectionStringBuilder.DefaultPooled); - - var servers = new List (builder.Servers); - Assert.AreEqual (1, servers.Count); - Assert.AreEqual (MongoServerEndPoint.DefaultPort, servers[0].Port); - Assert.AreEqual (MongoServerEndPoint.DefaultHost, servers[0].Host); - } - - [Test] - public void TestConnectionStringParsing (){ - var builder = new MongoConnectionStringBuilder ("Username=testuser;Password=testpassword;Server=testserver:555;ConnectionLifetime=50;MaximumPoolSize=101;MinimumPoolSize=202;Pooled=false"); - Assert.AreEqual ("testuser", builder.Username); - Assert.AreEqual ("testpassword", builder.Password); - Assert.AreEqual (101, builder.MaximumPoolSize); - Assert.AreEqual (202, builder.MinimumPoolSize); - Assert.AreEqual (TimeSpan.FromSeconds (50), builder.ConnectionLifetime); - Assert.AreEqual (false, builder.Pooled); - - var servers = new List(builder.Servers); - Assert.AreEqual (1, servers.Count); - Assert.AreEqual ("testserver", servers[0].Host); - Assert.AreEqual (555, servers[0].Port); - } - - [Test] - public void TestConnectionStringParsingServerWithoutPort (){ - var builder = new MongoConnectionStringBuilder ("Username=testuser;Password=testpassword;Server=testserver"); - Assert.AreEqual ("testuser", builder.Username); - Assert.AreEqual ("testpassword", builder.Password); - - var servers = new List (builder.Servers); - Assert.AreEqual (1, servers.Count); - Assert.AreEqual ("testserver", servers[0].Host); - Assert.AreEqual (MongoServerEndPoint.DefaultPort, servers[0].Port); - } - - [Test] - public void TestToStringOutput (){ - var builder = new MongoConnectionStringBuilder { Password = "testpassword", Username = "testusername", ConnectionLifetime = TimeSpan.FromSeconds (50), MaximumPoolSize = 101, MinimumPoolSize = 202, ConnectionTimeout = TimeSpan.FromSeconds(60)}; - builder.AddServer ("testserver1", 555); - builder.AddServer ("testserver2"); - - Assert.AreEqual ("Username=testusername;Password=testpassword;Server=testserver1:555,testserver2;MaximumPoolSize=101;MinimumPoolSize=202;ConnectionTimeout=60;ConnectionLifetime=50", builder.ToString ()); - } - - [Test] - public void TestToStringOutputWithoutUsernameAndPassword (){ - var builder = new MongoConnectionStringBuilder (); - 
builder.AddServer ("testserver", 555); - - Assert.AreEqual ("Server=testserver:555", builder.ToString ()); - } - - [Test] - public void TestToStringOutputWithDefaultServerPort (){ - var builder = new MongoConnectionStringBuilder (); - builder.AddServer ("testserver"); - Assert.AreEqual ("Server=testserver", builder.ToString ()); - } - - } -} diff --git a/MongoDB.Net-Tests/TestCursor.cs b/MongoDB.Net-Tests/TestCursor.cs deleted file mode 100644 index 16de1f19..00000000 --- a/MongoDB.Net-Tests/TestCursor.cs +++ /dev/null @@ -1,142 +0,0 @@ -using System; -using NUnit.Framework; - -using MongoDB.Driver; -using MongoDB.Driver.IO; -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver -{ - [TestFixture] - public class TestCursor : MongoTestBase - { - public override string TestCollections { - get { - return "sorts,hintindex,smallreads,reads"; - } - } - - public override void OnInit (){ - //smallreads - IMongoCollection smallreads = DB["smallreads"]; - for(int j = 1; j < 5; j++){ - smallreads.Insert(new Document(){{"x", 4},{"j", j}}); - } - smallreads.Insert(new Document(){{"x", 4}, {"j", 5}, {"n", 1}}); - - IMongoCollection reads = DB["reads"]; - for(int j = 1; j < 10000; j++){ - reads.Insert(new Document(){{"x", 4},{"h", "hi"},{"j", j}}); - } - } - - - [Test] - public void TestCanReadSmall() - { - ICursor c = DB["smallreads"].FindAll(); - - Assert.IsNotNull(c,"Cursor shouldn't be null"); - int reads = 0; - foreach(Document doc in c.Documents){ - reads++; - } - Assert.IsTrue(reads > 0, "No documents were returned."); - Assert.AreEqual(5, reads, "More than 5 documents in the small reads dataset"); - } - - [Test] - public void TestCanReadMore(){ - ICursor c = DB["reads"].FindAll(); - - Assert.IsNotNull(c,"Cursor shouldn't be null"); - int reads = 0; - int idchanges = 0; - long id = 0; - foreach(Document doc in c.Documents){ - reads++; - if(c.Id != id){ - idchanges++; - id = c.Id; - } - } - Assert.IsTrue(reads > 0, "No documents were returned."); - Assert.IsTrue(idchanges > 0,String.Format("ReadMore message never sent. 
{0} changes seen", idchanges)); - Assert.AreEqual(9999,reads, "Not all documents returned."); - System.Console.Out.Write(String.Format("{0} records read", reads)); - - - } - [Test] - public void TestCanReadAndKillCursor() - { - ICursor c = DB["reads"].FindAll(); - - Assert.IsNotNull(c,"Cursor shouldn't be null"); - foreach(Document doc in c.Documents){ - break; - } - c.Dispose(); - Assert.AreEqual(0,c.Id); - } - - [Test] - public void TestCanLimit(){ - ICursor c = DB["reads"].FindAll().Limit(5); - - Assert.IsNotNull(c,"Cursor shouldn't be null"); - int reads = 0; - foreach(Document doc in c.Documents){ - reads++; - } - Assert.IsTrue(reads > 0, "No documents were returned."); - Assert.AreEqual(5, reads); - } - - [Test] - public void TestSort(){ - IMongoCollection sorts = DB["sorts"]; - int[] randoms = new int[]{4,6,8,9,1,3,2,5,7,0}; - foreach(int x in randoms){ - sorts.Insert(new Document().Append("x", randoms[x])); - } - Assert.AreEqual(randoms.Length, sorts.Count()); - - int exp = 0; - foreach(Document doc in sorts.FindAll().Sort("x", IndexOrder.Ascending).Documents){ - Assert.AreEqual(exp, Convert.ToInt32(doc["x"])); - exp++; - } - - exp = 9; - foreach(Document doc in sorts.FindAll().Sort("x", IndexOrder.Descending).Documents){ - Assert.AreEqual(exp, Convert.ToInt32(doc["x"])); - exp--; - } - } - - [Test] - public void TestExplain(){ - Document exp = DB["reads"].FindAll().Limit(5).Skip(5).Sort("x").Explain(); - Assert.IsTrue(exp.Contains("cursor")); - Assert.IsTrue(exp.Contains("n")); - Assert.IsTrue(exp.Contains("nscanned")); - } - - [Test] - public void TestHint(){ - IMongoCollection reads = DB["reads"]; - Document hint = new Document().Append("x",IndexOrder.Ascending); - - Document exp = reads.FindAll().Hint(hint).Explain(); - Assert.IsTrue(exp.Contains("$err"), "No error found"); - - reads.MetaData.CreateIndex("hintindex",hint,false); - exp = reads.FindAll().Hint(hint).Explain(); - - Assert.IsTrue(exp.Contains("cursor")); - Assert.IsTrue(exp.Contains("n")); - Assert.IsTrue(exp.Contains("nscanned")); - } - } -} diff --git a/MongoDB.Net-Tests/TestDBRef.cs b/MongoDB.Net-Tests/TestDBRef.cs deleted file mode 100644 index 478657b0..00000000 --- a/MongoDB.Net-Tests/TestDBRef.cs +++ /dev/null @@ -1,78 +0,0 @@ - - -using System; -using NUnit.Framework; - -namespace MongoDB.Driver -{ - [TestFixture] - public class TestDBRef - { - [Test] - public void TestEqualsAreSameObject (){ - DBRef r = new DBRef ("tests", "2312314"); - Assert.AreEqual (r, r); - } - - [Test] - public void TestEqualsUsingSameValues (){ - String colname = "tests"; - String id = "32312312"; - DBRef r = new DBRef (colname, id); - DBRef r2 = new DBRef (colname, id); - - Assert.AreEqual (r, r2); - } - - [Test] - public void TestFromDocument (){ - String colname = "tests"; - String id = "32312312"; - Document doc = new Document ().Append (DBRef.RefName, colname).Append (DBRef.IdName, id); - - DBRef expected = new DBRef (colname, id); - Assert.AreEqual (expected, DBRef.FromDocument (doc)); - } - - [Test] - public void TestFromIncompleteDocumentThrowsArguementException (){ - bool thrown = false; - try { - DBRef.FromDocument (new Document ().Append (DBRef.RefName, "tests")); - } catch (ArgumentException) { - thrown = true; - } - Assert.IsTrue (thrown, "ArgumentException should have been thrown when trying to create convert from incomplete document"); - - } - - [Test] - public void TestIsDocumentDBRef (){ - Document doc = new Document (); - - Assert.IsFalse (DBRef.IsDocumentDBRef (null)); - Assert.IsFalse (DBRef.IsDocumentDBRef 
(doc)); - - doc[DBRef.RefName] = "tests"; - Assert.IsFalse (DBRef.IsDocumentDBRef (doc)); - - doc.Remove (DBRef.RefName); - doc[DBRef.IdName] = "12312131"; - Assert.IsFalse (DBRef.IsDocumentDBRef (doc)); - - doc[DBRef.RefName] = "tests"; - Assert.IsTrue (DBRef.IsDocumentDBRef (doc)); - - doc[DBRef.MetaName] = new Document(); - Assert.IsTrue (DBRef.IsDocumentDBRef (doc)); - } - - [Test] - public void TestCastsToDocument (){ - OidGenerator ogen = new OidGenerator (); - DBRef dref = new DBRef ("tests.dbrefs", ogen.Generate ()); - Document doc = (Document)dref; - Assert.AreEqual (dref.CollectionName, doc[DBRef.RefName]); - } - } -} diff --git a/MongoDB.Net-Tests/TestDatabase.cs b/MongoDB.Net-Tests/TestDatabase.cs deleted file mode 100644 index dc0cbb04..00000000 --- a/MongoDB.Net-Tests/TestDatabase.cs +++ /dev/null @@ -1,137 +0,0 @@ -using System; -using System.Collections.Generic; -using NUnit.Framework; - -namespace MongoDB.Driver -{ - [TestFixture] - public class TestDatabase : MongoTestBase - { - public override string TestCollections { - get { - return "refs,noerror,errcol,preverror"; - } - } - - public override void OnInit () - { - base.OnInit(); - } - - [Test] - public void TestFollowReference(){ - IMongoCollection refs = DB["refs"]; - Oid id = new Oid("4a7067c30a57000000008ecb"); - string msg = "this has an oid key"; - Document doc = new Document(){{"_id", id},{"msg", msg}}; - refs.Insert(doc); - - DBRef rf = new DBRef("refs", id); - - Document target = DB.FollowReference(rf); - Assert.IsNotNull(target, "FollowReference returned null"); - Assert.IsTrue(target.Contains("msg")); - Assert.AreEqual(msg, target["msg"]); - } - - [Test] - public void TestFollowNonReference(){ - Oid id = new Oid("BAD067c30a57000000008ecb"); - DBRef rf = new DBRef("refs", id); - - Document target = DB.FollowReference(rf); - Assert.IsNull(target, "FollowReference returned wasn't null"); - } - - [Test] - public void TestReferenceNonOid(){ - IMongoCollection refs = DB["refs"]; - - Document doc = new Document().Append("_id",123).Append("msg", "this has a non oid key"); - refs.Insert(doc); - - DBRef rf = new DBRef("refs",123); - - Document recv = DB.FollowReference(rf); - - Assert.IsNotNull(recv); - Assert.IsTrue(recv.Contains("msg")); - Assert.AreEqual(recv["_id"], (long)123); - } - - [Test] - public void TestGetCollectionNames(){ - List names = DB.GetCollectionNames(); - Assert.IsNotNull(names,"No collection names returned"); - Assert.IsTrue(names.Count > 0); - Assert.IsTrue(names.Contains("tests.inserts")); - } - - [Test] - public void TestEvalNoScope(){ - Document result = DB.Eval("function(){return 3;}"); - Assert.AreEqual(3, result["retval"]); - } - - [Test] - public void TestEvalWithScope(){ - int val = 3; - Document scope = new Document().Append("x",val); - Document result = DB.Eval("function(){return x;}", scope); - Assert.AreEqual(val, result["retval"]); - } - - [Test] - public void TestEvalWithScopeAsFunctionParameters(){ - int x = 3; - int y = 4; - string func = "adder = function(a, b){return a + b;}; return adder(x,y)"; - Document scope = new Document().Append("x",x).Append("y", y); - Document result = DB.Eval(func, scope); - Console.Out.WriteLine(result.ToString()); - Assert.AreEqual(x + y, result["retval"]); - } - - [Test] - public void TestGetLastErrorNoError(){ - DB["noerror"].Insert(new Document(){{"a",1},{"b",2}}); - Document error = DB.GetLastError(); - Assert.AreEqual(DBNull.Value, error["err"]); - } - - [Test] - public void TestGetLastError(){ - IMongoCollection errcol = DB["errcol"]; - 
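// Inserting the same document twice below is expected to violate the index on x,
// so the second GetLastError call should report a non-null err value.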
errcol.MetaData.CreateIndex(new Document(){{"x", IndexOrder.Ascending}}, true); - Document dup = new Document(){{"x",1},{"y",2}}; - errcol.Insert(dup); - Document error = DB.GetLastError(); - Assert.AreEqual(DBNull.Value, error["err"]); - - errcol.Insert(dup); - error = DB.GetLastError(); - - Assert.IsFalse(DBNull.Value == error["err"]); - - } - - [Test] - public void TestGetPrevError(){ - IMongoCollection col = DB["preverror"]; - col.MetaData.CreateIndex(new Document(){{"x", IndexOrder.Ascending}},true); - List docs = new List(); - for(int x = 0; x < 10; x++){ - docs.Add(new Document(){{"x",x},{"y",2}}); - } - docs.Add(new Document(){{"x",1},{"y",4}}); //the dupe - DB.ResetError(); - Assert.AreEqual(DBNull.Value, DB.GetLastError()["err"]); - - col.Insert(docs); - Document error = DB.GetLastError(); - - Assert.IsFalse(DBNull.Value == error["err"]); - - } - } -} diff --git a/MongoDB.Net-Tests/TestDatabaseJS.cs b/MongoDB.Net-Tests/TestDatabaseJS.cs deleted file mode 100644 index 5ec7eb92..00000000 --- a/MongoDB.Net-Tests/TestDatabaseJS.cs +++ /dev/null @@ -1,197 +0,0 @@ -using System; -using System.Collections.Generic; - -using NUnit.Framework; - -namespace MongoDB.Driver{ - - - [TestFixture()] - public class TestDatabaseJS : MongoTestBase - { - DatabaseJS js; - public override string TestCollections { - get { - return "jsreads"; - } - } - - public override void OnInit (){ - DB["system.js"].Delete(new Document()); - js = DB.JS; - - IMongoCollection jsreads = DB["jsreads"]; - for(int j = 1; j < 10; j++){ - jsreads.Insert(new Document(){{"j", j}}); - } - } - - [Test()] - public void TestCanGetDatabaseJSObject(){ - Assert.IsNotNull(DB.JS); - } - - [Test()] - public void TestCanGetAFunction(){ - string name = "fget"; - AddFunction(name); - Assert.IsNotNull(js[name]); - Assert.IsNotNull(js.GetFunction(name)); - } - - [Test()] - public void TestCanListFunctions(){ - string name = "flist"; - AddFunction(name); - List list = js.GetFunctionNames(); - Assert.IsTrue(list.Count > 0); - - bool found = false; - foreach(string l in list){ - if(l == name) found = true; - } - Assert.IsTrue(found, "Didn't find the function that was inserted."); - } - - [Test()] - public void TestCanAddAFunctionStrStr(){ - string name = "faddss"; - string func = "function(x, y){return x + y;}"; - js.Add(name,func); - Assert.IsNotNull(js[name]); - } - - [Test()] - public void TestCanAddAFunctionStrCode(){ - string name = "faddsc"; - Code func = new Code("function(x, y){return x + y;}"); - js.Add(name,func); - Assert.IsNotNull(js[name]); - } - - [Test()] - public void TestCanAddAFunctionDoc(){ - string name = "fadddoc"; - Code func = new Code("function(x, y){return x + y;}"); - Document doc = new Document().Append("_id", name).Append("value", func); - js.Add(doc); - Assert.IsNotNull(js[name]); - } - - [Test] - public void TestCannotAddAFunctionTwice(){ - string name = "faddtwice"; - Code func = new Code("function(x,y){return x + y;}"); - js.Add(name, func); - bool thrown = false; - try{ - js.Add(name, func); - }catch(ArgumentException){ - thrown = true; - } - Assert.IsTrue(thrown, "Shouldn't be able to add a function twice"); - } - - [Test] - public void TestCanAddFunctionByAssignment(){ - string name = "fassignadd"; - Code func = new Code("function(x,y){return x + y;}"); - Document doc = new Document().Append("_id", name).Append("value", func); - js[name] = doc; - Assert.IsNotNull(js[name]); - } - - [Test] - public void TestContains(){ - string name = "fcontains"; - AddFunction(name); - Assert.IsTrue(js.Contains(name)); - 
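// The Document overload below is expected to match on _id alone; its Code value ("dfs")
// deliberately differs from the stored function body.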
Assert.IsFalse(js.Contains("none")); - Assert.IsTrue(js.Contains(new Document().Append("_id", name).Append("value", new Code("dfs")))); - } - - [Test] - public void TestCopyTo(){ - int cnt = 5; - Document[] funcs = new Document[cnt]; - Code func = new Code("function(x,y){return x +y;}"); - - for(int i = 0; i < cnt; i++){ - string name = "_" + i + "fcopyTo"; - Document doc = new Document().Append("_id", name).Append("value", func); - js[name] = doc; - } - - js.CopyTo(funcs, 1); - Assert.IsNull(funcs[0]); - Assert.IsNotNull(funcs[1]); - Assert.IsNotNull(funcs[4]); - Assert.IsTrue(((string)funcs[1]["_id"]).StartsWith("_1")); //as long as no other _ named functions get in. - } - - [Test] - public void TestRemoveByName(){ - String name = "fremoven"; - AddFunction(name); - Assert.IsTrue(js.Contains(name)); - js.Remove(name); - Assert.IsFalse(js.Contains(name)); - } - - [Test] - public void TestRemoveByDoc(){ - String name = "fremoved"; - Document func = new Document().Append("_id", name); - AddFunction(name); - Assert.IsTrue(js.Contains(name)); - js.Remove(func); - Assert.IsFalse(js.Contains(name)); - } - - [Test] - public void TestForEach(){ - string name = "foreach"; - AddFunction(name); - bool found = false; - foreach(Document doc in js){ - if(name.Equals(doc["_id"]))found = true; - } - Assert.IsTrue(found, "Added function wasn't found during foreach"); - } - - [Test] - public void TestClear(){ - AddFunction("clear"); - Assert.IsTrue(js.Count > 0); - js.Clear(); - Assert.IsTrue(js.Count == 0); - } - - [Test] - public void TestExec(){ - js.Add("lt4", new Code("function(doc){return doc.j < 4;}")); - int cnt = 0; - foreach(Document doc in DB["reads"].Find("lt4(this)").Documents){ - cnt++; - } - Assert.AreEqual(3,cnt); - } - - [Test] - public void TestExecWithScope(){ - js.Add("lt", new Code("function(doc){ return doc.j < limit;}")); - int cnt = 0; - Document scope = new Document().Append("limit", 5); - Document query = new Document().Append("$where", new CodeWScope("lt(this)",scope)); - foreach(Document doc in DB["jsreads"].Find(query).Documents){ - cnt++; - } - Assert.AreEqual(4,cnt); - } - - protected void AddFunction(string name){ - Code func = new Code("function(x,y){return x + y;}"); - DB["system.js"].Insert(new Document().Append("_id", name).Append("value", func)); - } - } -} diff --git a/MongoDB.Net-Tests/TestDocument.cs b/MongoDB.Net-Tests/TestDocument.cs deleted file mode 100644 index 75a4ec00..00000000 --- a/MongoDB.Net-Tests/TestDocument.cs +++ /dev/null @@ -1,168 +0,0 @@ -/* - * User: scorder - * Date: 7/8/2009 - */ - -using System; -using System.Collections; - -using NUnit.Framework; - -using MongoDB.Driver; - -namespace MongoDB.Driver -{ - [TestFixture] - public class TestDocument - { - [Test] - public void TestValuesAdded() - { - Document d = new Document(); - d["test"] = 1; - Assert.AreEqual(1, d["test"]); - } - - [Test] - public void TestKeyOrderIsPreserved(){ - Document d = new Document(); - d["one"] = 1; - d.Add("two", 2); - d["three"] = 3; - int cnt = 1; - foreach(String key in d.Keys){ - Assert.AreEqual(cnt, d[key]); - cnt++; - } - } - [Test] - public void TestRemove(){ - Document d = new Document(); - d["one"] = 1; - d.Remove("one"); - Assert.IsFalse(d.Contains("one")); - } - - [Test] - public void TestKeyOrderPreservedOnRemove(){ - Document d = new Document(); - d["one"] = 1; - d["onepointfive"] = 1.5; - d.Add("two", 2); - d.Add("two.5", 2.5); - d.Remove("two.5"); - d["three"] = 3; - d.Remove("onepointfive"); - int cnt = 1; - foreach(String key in d.Keys){ - 
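// After the removals, the remaining keys (one, two, three) should still enumerate
// in insertion order with values 1, 2, 3.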
Assert.AreEqual(cnt, d[key]); - cnt++; - } - } - - [Test] - public void TestValues(){ - Document d = new Document(); - d["one"] = 1; - d.Add("two", 2); - d["three"] = 3; - ICollection vals = d.Values; - Assert.AreEqual(3, vals.Count); - - } - - [Test] - public void TestClearRemovesAll(){ - Document d = new Document(); - d["one"] = 1; - d.Add("two", 2); - d["three"] = 3; - Assert.AreEqual(3,d.Count); - d.Clear(); - Assert.AreEqual(0, d.Count); - Assert.IsNull(d["one"]); - Assert.IsFalse(d.Contains("one")); - } - - [Test] - public void TestCopyToCopiesAndPreservesKeyOrderToEmptyDoc(){ - Document d = new Document(); - Document dest = new Document(); - d["one"] = 1; - d.Add("two", 2); - d["three"] = 3; - d.CopyTo(dest); - int cnt = 1; - foreach(String key in dest.Keys){ - Assert.AreEqual(cnt, d[key]); - cnt++; - } - } - - [Test] - public void TestCopyToCopiesAndOverwritesKeys(){ - Document d = new Document(); - Document dest = new Document(); - dest["two"] = 200; - d["one"] = 1; - d.Add("two", 2); - d["three"] = 3; - d.CopyTo(dest); - Assert.AreEqual(2, dest["two"]); - } - - [Test] - public void TestTwoDocumentsWithSameContentInSameOrderAreEqual() { - Document d1 = new Document().Append("k1", "v1").Append("k2", "v2"); - Document d2 = new Document().Append("k1", "v1").Append("k2", "v2"); - AreEqual(d1, d2); - } - - [Test] - public void TestTwoDocumentsWithSameContentInDifferentOrderAreNotEqual() { - Document d1 = new Document().Append("k1", "v1").Append("k2", "v2"); - Document d2 = new Document().Append("k2", "v2").Append("k1", "v1"); - AreNotEqual(d1, d2); - } - - [Test] - public void TestTwoDocumentsWithSameArrayContentAreEqual() { - Document d1 = new Document().Append("k1", new string[] { "v1", "v2" }); - Document d2 = new Document().Append("k1", new string[] { "v1", "v2" }); - AreEqual(d1, d2); - } - - [Test] - public void TestTwoDocumentsWithMisorderedArrayContentAreNotEqual() { - Document d1 = new Document().Append("k1", new string[] { "v1", "v2" }); - Document d2 = new Document().Append("k1", new string[] { "v2", "v1" }); - AreNotEqual(d1, d2); - } - - [Test] - public void TestTwoDocumentsWithSameDocumentChildTreeAreEqual() { - Document d1 = new Document().Append("k1", new Document().Append("k2",new Document().Append("k3","foo"))); - Document d2 = new Document().Append("k1", new Document().Append("k2", new Document().Append("k3", "foo"))); - AreEqual(d1, d2); - } - - [Test] - public void TestTwoDocumentsWithDifferentDocumentChildTreeAreNotEqual() { - Document d1 = new Document().Append("k1", new Document().Append("k2", new Document().Append("k3", "foo"))); - Document d2 = new Document().Append("k1", new Document().Append("k2", new Document().Append("k3", "bar"))); - AreNotEqual(d1, d2); - } - - private void AreEqual(Document d1, Document d2) { - if (!d1.Equals(d2)) { - Assert.Fail(string.Format("Documents don't match\r\nExpected: {0}\r\nActual: {1}", d1, d2)); - } - } - - private void AreNotEqual(Document d1, Document d2) { - if (d1.Equals(d2)) { - Assert.Fail(string.Format("Documents match\r\nExpected: not {0}\r\nActual: {1}", d1, d2)); - } - } - } -} diff --git a/MongoDB.Net-Tests/TestMapReduce.cs b/MongoDB.Net-Tests/TestMapReduce.cs deleted file mode 100644 index 0145fa03..00000000 --- a/MongoDB.Net-Tests/TestMapReduce.cs +++ /dev/null @@ -1,58 +0,0 @@ - -using System; -using NUnit.Framework; - -namespace MongoDB.Driver -{ - [TestFixture()] - public class TestMapReduce : MongoTestBase - { - IMongoCollection mrcol; - string mapfunction = "function(){\n" + - " this.tags.forEach(\n" + - 
" function(z){\n" + - " emit( z , { count : 1 } );\n" + - " });\n" + - "};"; - string reducefunction = "function( key , values ){\n" + - " var total = 0;\n" + - " for ( var i=0; i lower); - } - - [Test] - public void TestOidFromBytes(){ - byte[] bytes = new byte[]{1,2,3,4,5,6,7,8,9,10,11,12}; - string hex = "0102030405060708090a0b0c"; - - Oid bval = new Oid(bytes); - Oid sval = new Oid(hex); - Assert.AreEqual(bval, sval); - } - - [Test] - public void TestNullValue(){ - bool thrown = false; - try{ - new Oid(String.Empty); - }catch(Exception){ - thrown = true; - } - Assert.IsTrue(thrown,"Null value exception not thrown"); - } - - [Test] - public void TestCtor(){ - bool thrown = false; - try{ - new Oid("4a7067c30a57000000008ecb"); - }catch(ArgumentException){ - thrown = true; - } - Assert.IsFalse(thrown,"ID should be fine."); - } - - [Test] - public void TestDecode(){ - string hex = "4a7067c30a57000000008ecb"; - Oid oid = new Oid(hex); - - Assert.AreEqual("\"" + hex + "\"", oid.ToString()); - } - - [Test] - public void TestEquals(){ - string hex = "4a7067c30a57000000008ecb"; - Assert.AreEqual(new Oid(hex), new Oid(hex)); - - } - [Test] - public void TestNotEquals(){ - string hex = "4a7067c30a57000000008ecb"; - string hex2 = "4a7067c30a57000000008ecc"; - Assert.AreNotEqual(new Oid(hex), new Oid(hex2)); - - } - - [Test] - public void TestDate(){ - string hex = "4B458B95D114BE541B000000"; - Oid oid = new Oid(hex); - //Expected: 2010-01-07 02:24:56.633 - DateTime expected = new DateTime(2010,1,7,7,21,57,DateTimeKind.Utc); - Assert.AreEqual(expected,oid.Created); - } - - [Test] - public void TestToByteArray(){ - byte[] bytes = new byte[]{1,2,3,4,5,6,7,8,9,10,11,12}; - string hex = "0102030405060708090a0b0c"; - - Oid bval = new Oid(bytes); - byte[] bytes2 = bval.ToByteArray(); - - Assert.IsNotNull(bytes2); - Assert.AreEqual(12, bytes2.Length); - Assert.AreEqual(bytes, bytes2); - } - - [Test] - public void TestNewOidFromToString(){ - var hex = "4B458B95D114BE541B000000"; - var firstOid = new Oid(hex); - var secondOid = new Oid(firstOid.ToString()); - - Assert.AreEqual(firstOid.ToString(), secondOid.ToString()); - } - } -} diff --git a/MongoDBDriver/.gitignore b/MongoDBDriver/.gitignore deleted file mode 100644 index 4d42fe72..00000000 --- a/MongoDBDriver/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -obj/* -*.xml -*.dll -*.mdb -*.pidb -*.csproj diff --git a/MongoDBDriver/Binary.cs b/MongoDBDriver/Binary.cs deleted file mode 100644 index 927906f5..00000000 --- a/MongoDBDriver/Binary.cs +++ /dev/null @@ -1,31 +0,0 @@ -using System; - -namespace MongoDB.Driver -{ - public class Binary{ - public enum TypeCode:byte{ - Unknown = 0, - General = 2, - // Uuid is now replaced by Guid - //Uuid = 3, - Md5 = 5, - UserDefined = 80 - } - - public byte[] Bytes{get;set;} - - public TypeCode Subtype{get;set;} - - public Binary() { } - - public Binary(byte[] value){ - Bytes = value; - Subtype = TypeCode.General; - } - - public override string ToString (){ - return String.Format(@"{{ ""$binary"": ""{0}"", ""$type"" : {1} }}", - Convert.ToBase64String(Bytes), (int)Subtype); - } - } -} diff --git a/MongoDBDriver/Bson/BsonReader.cs b/MongoDBDriver/Bson/BsonReader.cs deleted file mode 100644 index 0b09bb07..00000000 --- a/MongoDBDriver/Bson/BsonReader.cs +++ /dev/null @@ -1,330 +0,0 @@ -using System; -using System.Collections; -using System.Collections.Generic; -using System.IO; -using System.Reflection; -using System.Text; - -namespace MongoDB.Driver.Bson -{ - /// - /// Reads binary streams containing BSON data and converts 
them to native types. - /// - public class BsonReader - { - private Stream stream; - private BinaryReader reader; - private int position = 0; - - private byte[] _byteBuffer; - private char[] _charBuffer; - - private const int MaxCharBytesSize = 128; - - private byte[] seqRange1 = new byte[]{0,127}; //Range of 1-byte sequence - private byte[] seqRange2 = new byte[]{194,223};//Range of 2-byte sequence - private byte[] seqRange3 = new byte[]{224,239};//Range of 3-byte sequence - private byte[] seqRange4 = new byte[]{240,244};//Range of 4-byte sequence - - public BsonReader (Stream stream) - { - this.stream = stream; - reader = new BinaryReader (this.stream); - } - - public int Position { - get { return position; } - } - - public Document Read () - { - position = 0; - Document doc = ReadDocument(); - return doc; - } - - public Document ReadDocument(){ - int startpos = position; - Document doc = new Document (); - int size = reader.ReadInt32 (); - position += 4; - while ((position - startpos) + 1 < size) { - ReadElement (doc); - } - byte eoo = reader.ReadByte (); - position++; - if (eoo != 0) - throw new InvalidDataException ("Document not null terminated"); - if (size != position - startpos) { - throw new InvalidDataException (string.Format ("Should have read {0} bytes from stream but only read {1}", size, (position - startpos))); - } - return doc; - } - - public void ReadElement (Document doc){ - sbyte typeNum = (sbyte)reader.ReadByte (); - position++; - String key = ReadString (); - Object element = ReadElementType(typeNum); - doc.Add (key, element); - } - - public Object ReadElementType (sbyte typeNum){ - switch ((BsonDataType)typeNum) { - case BsonDataType.Null: - case BsonDataType.Undefined: - return DBNull.Value; - case BsonDataType.MinKey: - return MongoMinKey.Value; - case BsonDataType.MaxKey: - return MongoMaxKey.Value; - case BsonDataType.Boolean: - position++; - return reader.ReadBoolean (); - case BsonDataType.Integer: - position += 4; - return reader.ReadInt32 (); - case BsonDataType.Long: - position += 8; - return reader.ReadInt64 (); - case BsonDataType.Date: - position += 8; - long millis = reader.ReadInt64 (); - return BsonInfo.Epoch.AddMilliseconds(millis); - case BsonDataType.Oid: - position += 12; - return new Oid (reader.ReadBytes (12)); - case BsonDataType.Number: - position += 8; - return reader.ReadDouble (); - case BsonDataType.String:{ - return ReadLenString (); - } - case BsonDataType.Obj:{ - Document doc = this.ReadDocument(); - if(DBRef.IsDocumentDBRef(doc)){ - return DBRef.FromDocument(doc); - } - return doc; - } - - case BsonDataType.Array:{ - Document doc = this.ReadDocument(); - return ConvertToArray (doc); - } - case BsonDataType.Regex:{ - MongoRegex r = new MongoRegex (); - r.Expression = this.ReadString (); - r.Options = this.ReadString (); - return r; - } - case BsonDataType.Code:{ - Code c = new Code (); - c.Value = ReadLenString(); - return c; - } - case BsonDataType.CodeWScope:{ - int startpos = position; - int size = reader.ReadInt32 (); - position += 4; - - String val = this.ReadLenString(); - Document scope = this.ReadDocument(); - if (size != position - startpos) { - throw new System.IO.InvalidDataException (string.Format ("Should have read {0} bytes from stream but read {1} in CodeWScope", size, position - startpos)); - } - return new CodeWScope (val, scope); - } - case BsonDataType.Binary:{ - int size = reader.ReadInt32 (); - position += 4; - byte subtype = reader.ReadByte (); - position ++; - if (subtype == (byte)Binary.TypeCode.General) { - size 
= reader.ReadInt32 (); - position += 4; - } - byte[] bytes = reader.ReadBytes (size); - position += size; - - // From http://en.wikipedia.org/wiki/Universally_Unique_Identifier - // The most widespread use of this standard is in Microsoft's Globally Unique Identifiers (GUIDs). - if (subtype == 3 && 16 == size) - { - return new Guid(bytes); - } - - Binary b = new Binary(); - b.Bytes = bytes; - b.Subtype = (Binary.TypeCode)subtype; - return b; - } - default: - throw new ArgumentOutOfRangeException (String.Format ("Type Number: {0} not recognized", typeNum)); - } - } - - public string ReadString (){ - EnsureBuffers (); - - StringBuilder builder = new StringBuilder(); - int totalBytesRead = 0; - int offset = 0; - do { - int byteCount = 0; - int count = offset; - byte b = 0;; - while (count < MaxCharBytesSize && (b = reader.ReadByte ()) > 0) { - _byteBuffer[count++] = b; - } - byteCount = count - offset; - totalBytesRead += byteCount; - position += byteCount; - - if(count == 0) break; //first byte read was the terminator. - int lastFullCharStop = GetLastFullCharStop(count - 1); - - int charCount = Encoding.UTF8.GetChars (_byteBuffer, 0, lastFullCharStop + 1, _charBuffer, 0); - builder.Append (_charBuffer, 0, charCount); - - if(lastFullCharStop < byteCount - 1){ - offset = byteCount - lastFullCharStop - 1; - //Copy end bytes to begining - Array.Copy(_byteBuffer, lastFullCharStop + 1, _byteBuffer, 0, offset); - }else{ - offset = 0; - } - - if(b == 0){ - break; - } - } while (true); - position++; - return builder.ToString(); - - } - - public string ReadLenString (){ - int length = reader.ReadInt32 (); - string s = GetString (length - 1); - reader.ReadByte (); - - position += (4 + 1); - return s; - } - - private string GetString (int length){ - if (length == 0) - return string.Empty; - - EnsureBuffers (); - - StringBuilder builder = new StringBuilder (length);; - - int totalBytesRead = 0; - int offset = 0; - do { - int byteCount = 0; - int count = ((length - totalBytesRead) > MaxCharBytesSize - offset) ? (MaxCharBytesSize - offset) : - (length - totalBytesRead); - - byteCount = reader.BaseStream.Read (_byteBuffer, offset, count); - totalBytesRead += byteCount; - byteCount += offset; - - int lastFullCharStop = 0; - lastFullCharStop = GetLastFullCharStop(byteCount - 1); - - if (byteCount == 0) - throw new EndOfStreamException ("Unable to read beyond the end of the stream."); - - int charCount = Encoding.UTF8.GetChars (_byteBuffer, 0, lastFullCharStop + 1, _charBuffer, 0); - builder.Append (_charBuffer, 0, charCount); - - if(lastFullCharStop < byteCount - 1){ - offset = byteCount - lastFullCharStop - 1; - //Copy end bytes to begining - Array.Copy(_byteBuffer, lastFullCharStop + 1, _byteBuffer, 0, offset); - }else{ - offset = 0; - } - - } while (totalBytesRead < length); - - position += totalBytesRead; - return builder.ToString (); - } - - private int GetLastFullCharStop(int start){ - int lookbackPos = start; - int bis = 0; - while(lookbackPos >= 0){ - bis = BytesInSequence(_byteBuffer[lookbackPos]); - if(bis == 0){ - lookbackPos--; - continue; - }else if(bis == 1){ - break; - }else{ - lookbackPos--; - break; - } - } - if(bis == start - lookbackPos){ - //Full character. 
- return start; - }else{ - return lookbackPos; - } - } - - private int BytesInSequence(byte b){ - if(b <= seqRange1[1]) return 1; - if(b >= seqRange2[0] && b <= seqRange2[1]) return 2; - if(b >= seqRange3[0] && b <= seqRange3[1]) return 3; - if(b >= seqRange4[0] && b <= seqRange4[1]) return 4; - return 0; - } - - private void EnsureBuffers (){ - if (_byteBuffer == null) { - _byteBuffer = new byte[MaxCharBytesSize]; - } - if (_charBuffer == null) { - int charBufferSize = Encoding.UTF8.GetMaxCharCount (MaxCharBytesSize); - _charBuffer = new char[charBufferSize]; - } - } - - private Type GetTypeForIEnumerable (Document doc){ - if (doc.Keys.Count < 1) - return typeof(Object); - Type comp = null; - foreach (String key in doc.Keys) { - Object obj = doc[key]; - Type test = obj.GetType (); - if (comp == null) { - comp = test; - } else { - if (comp != test) - return typeof(Object); - } - } - return comp; - } - - private IEnumerable ConvertToArray (Document doc){ - var genericListType = typeof(List<>); - var arrayType = GetTypeForIEnumerable(doc); - var listType = genericListType.MakeGenericType(arrayType); - - var list = (IList)Activator.CreateInstance(listType); - - foreach (String key in doc.Keys) { - list.Add(doc[key]); - } - - return list; - } - } -} diff --git a/MongoDBDriver/Bson/BsonWriter.cs b/MongoDBDriver/Bson/BsonWriter.cs deleted file mode 100644 index e4eaf8a1..00000000 --- a/MongoDBDriver/Bson/BsonWriter.cs +++ /dev/null @@ -1,319 +0,0 @@ -using System; -using System.Collections; -using System.IO; -using System.Text; - -namespace MongoDB.Driver.Bson -{ - /// - /// Class that knows how to format a native object into bson bits. - /// - public class BsonWriter - { - private Stream stream; - private BinaryWriter writer; - private Encoding encoding = Encoding.UTF8; - private int buffLength = 256; - private byte[] buffer; - int maxChars; - - public BsonWriter(Stream stream){ - this.stream = stream; - writer = new BinaryWriter(this.stream); - buffer = new byte[buffLength]; - maxChars = buffLength / encoding.GetMaxByteCount(1); - } - - public void Write(Document doc){ - int size = CalculateSize(doc); - if(size >= BsonInfo.MaxDocumentSize) throw new ArgumentException("Maximum document size exceeded."); - writer.Write(size); - foreach(String key in doc.Keys){ - Object val = doc[key]; - BsonDataType t = TranslateToBsonType(val); - writer.Write((byte)t); - this.WriteString(key); - this.WriteValue(t,val); - } - writer.Write((byte)0); - } - - public void WriteArray(IEnumerable arr){ - int size = CalculateSize(arr); - writer.Write(size); - int keyname = 0; - foreach(Object val in arr){ - BsonDataType t = TranslateToBsonType(val); - writer.Write((byte)t); - this.WriteString(keyname.ToString()); - this.WriteValue(t,val); - keyname++; - } - writer.Write((byte)0); - } - - public void WriteValue(BsonDataType dt, Object obj){ - switch (dt){ - case BsonDataType.MinKey: - case BsonDataType.MaxKey: - case BsonDataType.Null: - return; - case BsonDataType.Boolean: - writer.Write((bool)obj); - return; - case BsonDataType.Integer: - writer.Write((int)obj); - return; - case BsonDataType.Long: - writer.Write((long)obj); - return; - case BsonDataType.Date: - DateTime d = (DateTime)obj; - TimeSpan diff = d.ToUniversalTime() - BsonInfo.Epoch; - double time = Math.Floor(diff.TotalMilliseconds); - writer.Write((long)time); - return; - case BsonDataType.Oid: - Oid id = (Oid) obj; - writer.Write(id.ToByteArray()); - return; - case BsonDataType.Number: - writer.Write((double)obj); - return; - case BsonDataType.String:{ - 
String str = (String)obj; - writer.Write(CalculateSize(str,false)); - this.WriteString(str); - return; - } - case BsonDataType.Obj: - if(obj is Document){ - this.Write((Document)obj); - }else if(obj is DBRef){ - this.Write((Document)((DBRef)obj)); - } - return; - case BsonDataType.Array: - this.WriteArray((IEnumerable)obj); - return; - case BsonDataType.Regex:{ - MongoRegex r = (MongoRegex)obj; - this.WriteString(r.Expression); - this.WriteString(r.Options); - return; - } - case BsonDataType.Code:{ - Code c = (Code)obj; - this.WriteValue(BsonDataType.String,c.Value); - return; - } - case BsonDataType.CodeWScope:{ - CodeWScope cw = (CodeWScope)obj; - writer.Write(CalculateSize(cw)); - this.WriteValue(BsonDataType.String,cw.Value); - this.WriteValue(BsonDataType.Obj,cw.Scope); - return; - } - case BsonDataType.Binary:{ - if (obj is Guid) { - writer.Write((int)16); - writer.Write((byte)3); - writer.Write(((Guid)obj).ToByteArray()); - } else { - Binary b = (Binary)obj; - if(b.Subtype == Binary.TypeCode.General){ - writer.Write(b.Bytes.Length + 4); - writer.Write((byte)b.Subtype); - writer.Write(b.Bytes.Length); - }else{ - writer.Write(b.Bytes.Length); - writer.Write((byte)b.Subtype); - } - writer.Write(b.Bytes); - } - return; - } - default: - throw new NotImplementedException(String.Format("Writing {0} types not implemented.",obj.GetType().Name)); - } - } - - public void WriteString(String str){ - int byteCount = encoding.GetByteCount(str); - if(byteCount < buffLength){ - encoding.GetBytes(str,0,str.Length,buffer,0); - writer.Write(buffer,0,byteCount); - }else{ - int charCount; - int totalCharsWritten = 0; - - for (int i = str.Length; i > 0; i -= charCount){ - charCount = (i > maxChars) ? maxChars : i; - int count = encoding.GetBytes(str, totalCharsWritten, charCount, buffer, 0); - writer.Write(buffer, 0, count); - totalCharsWritten += charCount; - } - } - writer.Write((byte)0); - } - - public int CalculateSize(Object val){ - if(val == null) return 0; - switch (TranslateToBsonType(val)){ - case BsonDataType.MinKey: - case BsonDataType.MaxKey: - case BsonDataType.Null: - return 0; - case BsonDataType.Boolean: - return 1; - case BsonDataType.Integer: - return 4; - case BsonDataType.Long: - case BsonDataType.Date: - return 8; - case BsonDataType.Oid: - return 12; - case BsonDataType.Number: - return sizeof(Double); - case BsonDataType.String: - return CalculateSize((string)val); - case BsonDataType.Obj:{ - Type t = val.GetType(); - if(t == typeof(Document)){ - return CalculateSize((Document)val); - } - if(t == typeof(DBRef)){ - return CalculateSize((Document)((DBRef)val)); - } - throw new NotImplementedException(String.Format("Calculating size of {0} is not implemented yet.",t.Name)); - } - case BsonDataType.Array: - return CalculateSize((IEnumerable)val); - case BsonDataType.Regex:{ - MongoRegex r = (MongoRegex)val; - int size = CalculateSize(r.Expression,false); - size += CalculateSize(r.Options,false); - return size; - } - case BsonDataType.Code: - Code c = (Code)val; - return CalculateSize(c.Value,true); - case BsonDataType.CodeWScope:{ - CodeWScope cw = (CodeWScope)val; - int size = 4; - size += CalculateSize(cw.Value,true); - size += CalculateSize(cw.Scope); - return size; - } - case BsonDataType.Binary:{ - if (val is Guid) - return 21; - Binary b = (Binary)val; - int size = 4; //size int - size += 1; //subtype - if (b.Subtype == Binary.TypeCode.General) - { - size += 4; //embedded size int - } - size += b.Bytes.Length; - return size; - } - default: - throw new 
NotImplementedException(String.Format("Calculating size of {0} is not implemented.",val.GetType().Name)); - } - } - - public int CalculateSize(Document doc){ - int size = 4; - foreach(String key in doc.Keys){ - int elsize = 1; //type - elsize += CalculateSize(key,false); - elsize += CalculateSize(doc[key]); - size += elsize; - } - size += 1; //terminator - return size; - } - - public int CalculateSize(IEnumerable arr){ - int size = 4;//base size for the object - int keyname = 0; - foreach(Object o in arr){ - int elsize = 1; //type - size += CalculateSize(keyname.ToString(),false); //element name - size += CalculateSize(o); - size += elsize; - keyname++; - } - size += 1; //terminator - return size; - } - - public int CalculateSize(String val){ - return CalculateSize(val, true); - } - - public int CalculateSize(String val, bool includeLen){ - int size = 1; //terminator - if(includeLen) size += 4; - if(val != null) size += encoding.GetByteCount(val); - return size; - } - - public void Flush(){ - writer.Flush(); - } - - protected BsonDataType TranslateToBsonType(Object val){ - if(val == null)return BsonDataType.Null; - Type t = val.GetType(); - //special case enums - if(val is Enum){ - t = Enum.GetUnderlyingType(t); - } - BsonDataType ret; - if(t == typeof(Double)){ - ret = BsonDataType.Number; - }else if(t == typeof(Single)){ - ret = BsonDataType.Number; - }else if(t == typeof(String)){ - ret = BsonDataType.String; - }else if(t == typeof(Document)){ - ret = BsonDataType.Obj; - }else if(t == typeof(int)){ - ret = BsonDataType.Integer; - }else if(t == typeof(long)){ - ret = BsonDataType.Long; - }else if(t == typeof(bool)){ - ret = BsonDataType.Boolean; - }else if(t == typeof(Oid)){ - ret = BsonDataType.Oid; - }else if(t == typeof(DateTime)){ - ret = BsonDataType.Date; - }else if(t == typeof(MongoRegex)){ - ret = BsonDataType.Regex; - }else if(t == typeof(DBRef)){ - ret = BsonDataType.Obj; - }else if(t == typeof(Code)){ - ret = BsonDataType.Code; - }else if(t == typeof(CodeWScope)){ - ret = BsonDataType.CodeWScope; - }else if(t == typeof(DBNull)){ - ret = BsonDataType.Null; - }else if(t == typeof(Binary)){ - ret = BsonDataType.Binary; - }else if(t == typeof(Guid)){ - ret = BsonDataType.Binary; - }else if(t == typeof(MongoMinKey)){ - ret = BsonDataType.MinKey; - }else if(t == typeof(MongoMaxKey)){ - ret = BsonDataType.MaxKey; - }else if(val is IEnumerable){ - ret = BsonDataType.Array; - }else{ - throw new ArgumentOutOfRangeException(String.Format("Type: {0} not recognized",t.FullName)); - } - return ret; - } - } -} diff --git a/MongoDBDriver/Code.cs b/MongoDBDriver/Code.cs deleted file mode 100644 index 52378708..00000000 --- a/MongoDBDriver/Code.cs +++ /dev/null @@ -1,17 +0,0 @@ -namespace MongoDB.Driver -{ - public class Code - { - public string Value {get; set;} - - public Code(){} - - public Code(string value){ - this.Value = value; - } - - public override string ToString() { - return string.Format(@"{{ ""$code"": ""{0}"" }}", JsonFormatter.Escape(Value)); - } - } -} diff --git a/MongoDBDriver/CodeWScope.cs b/MongoDBDriver/CodeWScope.cs deleted file mode 100644 index 15409ed4..00000000 --- a/MongoDBDriver/CodeWScope.cs +++ /dev/null @@ -1,18 +0,0 @@ -using System; - -namespace MongoDB.Driver -{ - public class CodeWScope : Code - { - public Document Scope {get;set;} - - public CodeWScope(){} - - public CodeWScope(String code):this(code, new Document()){} - - public CodeWScope(String code, Document scope){ - this.Value = code; - this.Scope = scope; - } - } -} diff --git 
a/MongoDBDriver/Collection.cs b/MongoDBDriver/Collection.cs deleted file mode 100644 index a753f9a0..00000000 --- a/MongoDBDriver/Collection.cs +++ /dev/null @@ -1,336 +0,0 @@ -using System; -using System.Collections.Generic; -using System.IO; -using MongoDB.Driver.Connections; -using MongoDB.Driver.Protocol; - -namespace MongoDB.Driver -{ - public class Collection : IMongoCollection - { - private Connection connection; - - private string name; - public string Name { - get { return name; } - } - - private string dbName; - public string DbName { - get { return dbName; } - } - - public string FullName{ - get{ return dbName + "." + name;} - } - - private CollectionMetaData metaData; - public CollectionMetaData MetaData { - get { - if(metaData == null){ - metaData = new CollectionMetaData(this.dbName,this.name, this.connection); - } - return metaData; - } - } - - private Database db; - private Database Db{ - get{ - if(db == null) - db = new Database(this.connection, this.dbName); - return db; - } - } - public Collection(string name, Connection conn, string dbName) - { - this.name = name; - this.connection = conn; - this.dbName = dbName; - } - - /// - /// Finds and returns the first document in a query. - /// - /// - /// A representing the query. - /// - /// - /// A from the collection. - /// - public Document FindOne(Document spec){ - ICursor cur = this.Find(spec, -1, 0, null); - foreach(Document doc in cur.Documents){ - cur.Dispose(); - return doc; - } - //FIXME Decide if this should throw a not found exception instead of returning null. - return null; //this.Find(spec, -1, 0, null)[0]; - } - - public ICursor FindAll() { - Document spec = new Document(); - return this.Find(spec, 0, 0, null); - } - - public ICursor Find(String where){ - Document spec = new Document(); - spec.Append("$where", new Code(where)); - return this.Find(spec, 0, 0, null); - } - - public ICursor Find(Document spec) { - return this.Find(spec, 0, 0, null); - } - - public ICursor Find(Document spec, int limit, int skip) { - return this.Find(spec, limit, skip, null); - } - - public ICursor Find(Document spec, int limit, int skip, Document fields) { - if(spec == null) spec = new Document(); - Cursor cur = new Cursor(connection, this.FullName, spec, limit, skip, fields); - return cur; - } - - /// - /// Entrypoint into executing a map/reduce query against the collection. - /// - /// - /// A - /// - public MapReduce MapReduce(){ - return new MapReduce(this.Db, this.Name); - } - - public MapReduceBuilder MapReduceBuilder(){ - return new MapReduceBuilder(this.MapReduce()); - } - - - /// - ///Count all items in the collection. - /// - public long Count(){ - return this.Count(new Document()); - } - - /// - /// Count all items in a collection that match the query spec. - /// - /// - /// It will return 0 if the collection doesn't exist yet. - /// - public long Count(Document spec){ - try{ - //Database db = new Database(this.connection, this.dbName); - Document ret = this.Db.SendCommand(new Document().Append("count",this.Name).Append("query",spec)); - double n = (double)ret["n"]; - return Convert.ToInt64(n); - }catch(MongoCommandException){ - //FIXME This is an exception condition when the namespace is missing. - //-1 might be better here but the console returns 0. - return 0; - } - - } - - /// - /// Inserts the Document into the collection. 
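// A minimal, hedged sketch of the query surface deleted above (FindOne, Find with
// limit/skip, Count). The connection string, database name and collection name are
// illustrative assumptions; every member used here appears in the Collection class above.
using System;
using MongoDB.Driver;

public static class CollectionQuerySketch
{
    public static void Run()
    {
        Database db = new Database("Server=localhost:27017", "exampledb");
        IMongoCollection posts = db["posts"];

        // First matching document, or null when nothing matches.
        Document spec = new Document().Append("author", "sample");
        Document first = posts.FindOne(spec);

        // Paged query: skip the first 10 matches, return at most 5, then count all matches.
        using (ICursor page = posts.Find(spec, 5, 10))
        {
            foreach (Document doc in page.Documents)
                Console.WriteLine(doc);
        }
        long total = posts.Count(spec);
    }
}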
- /// - public void Insert (Document doc, bool safemode){ - Insert(doc); - CheckError(safemode); - } - - public void Insert(Document doc){ - Document[] docs = new Document[]{doc,}; - this.Insert(docs); - } - - public void Insert (IEnumerable docs, bool safemode){ - if(safemode)this.Db.ResetError(); - this.Insert(docs); - CheckPreviousError(safemode); - } - - public void Insert(IEnumerable docs){ - InsertMessage im = new InsertMessage(); - im.FullCollectionName = this.FullName; - List idocs = new List(); - foreach(Document doc in docs){ - if(doc.Contains("_id") == false){ - Oid _id = Oid.NewOid(); - doc.Prepend("_id",_id); - } - } - idocs.AddRange(docs); - im.Documents = idocs.ToArray(); - try{ - this.connection.SendMessage(im); - }catch(IOException ioe){ - throw new MongoCommException("Could not insert document, communication failure", this.connection,ioe); - } - } - - /// - /// Deletes documents from the collection according to the spec. - /// - /// An empty document will match all documents in the collection and effectively truncate it. - /// - public void Delete (Document selector, bool safemode){ - Delete(selector); - CheckError(safemode); - } - - /// - /// Deletes documents from the collection according to the spec. - /// - /// An empty document will match all documents in the collection and effectively truncate it. - /// - public void Delete(Document selector){ - DeleteMessage dm = new DeleteMessage(); - dm.FullCollectionName = this.FullName; - dm.Selector = selector; - try{ - this.connection.SendMessage(dm); - }catch(IOException ioe){ - throw new MongoCommException("Could not delete document, communication failure", this.connection,ioe); - } - } - - - public void Update (Document doc, bool safemode){ - Update(doc); - CheckError(safemode); - } - - /// - /// Saves a document to the database using an upsert. - /// - /// - /// The document will contain the _id that is saved to the database. This is really just an alias - /// to Update(Document) to maintain consistency between drivers. - /// - public void Save(Document doc){ - Update(doc); - } - - /// - /// Updates a document with the data in doc as found by the selector. - /// - /// - /// _id will be used in the document to create a selector. If it isn't in - /// the document then it is assumed that the document is new and an upsert is sent to the database - /// instead. - /// - public void Update(Document doc){ - //Try to generate a selector using _id for an existing document. - //otherwise just set the upsert flag to 1 to insert and send onward. - Document selector = new Document(); - int upsert = 0; - if(doc.Contains("_id") & doc["_id"] != null){ - selector["_id"] = doc["_id"]; - }else{ - //Likely a new document - doc.Prepend("_id",Oid.NewOid()); - upsert = 1; - } - this.Update(doc, selector, upsert); - } - - public void Update (Document doc, Document selector, bool safemode){ - Update(doc, selector,0,safemode); - } - - /// - /// Updates a document with the data in doc as found by the selector. - /// - public void Update(Document doc, Document selector){ - this.Update(doc, selector, 0); - } - - public void Update (Document doc, Document selector, UpdateFlags flags, bool safemode){ - Update(doc,selector,flags); - CheckError(safemode); - } - - /// - /// Updates a document with the data in doc as found by the selector. - /// - /// The to update with - /// - /// - /// The query spec to find the document to update. 
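// A hedged sketch of the write path above: Insert with safemode consults getlasterror
// and throws a translated exception on failure, while Update without a selector performs
// the upsert-style save described in the comments. All names below are illustrative.
using MongoDB.Driver;

public static class CollectionWriteSketch
{
    public static void Run()
    {
        Database db = new Database("Server=localhost:27017", "exampledb");
        IMongoCollection posts = db["posts"];

        Document doc = new Document().Append("title", "hello").Append("views", 1);
        posts.Insert(doc, true);   // safemode: surfaces server-side errors as exceptions

        doc["views"] = 2;
        posts.Update(doc);         // doc now carries _id, so this updates the stored copy

        posts.Delete(new Document().Append("title", "hello"), true);
    }
}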
- /// - /// - /// - /// - public void Update(Document doc, Document selector, UpdateFlags flags){ - UpdateMessage um = new UpdateMessage(); - um.FullCollectionName = this.FullName; - um.Selector = selector; - um.Document = doc; - um.Flags = (int)flags; - try{ - this.connection.SendMessage(um); - }catch(IOException ioe){ - throw new MongoCommException("Could not update document, communication failure", this.connection,ioe); - } - - } - - public void Update (Document doc, Document selector, int flags, bool safemode){ - Update(doc,selector,flags); - CheckError(safemode); - } - - public void Update(Document doc, Document selector, int flags){ - //TODO Update the interface and make a breaking change. - this.Update(doc,selector,(UpdateFlags)flags); - } - - /// - /// Runs a multiple update query against the database. It will wrap any - /// doc with $set if the passed in doc doesn't contain any '$' ops. - /// - /// - /// - public void UpdateAll(Document doc, Document selector){ - bool foundOp = false; - foreach(string key in doc.Keys){ - if(key.IndexOf('$') == 0){ - foundOp = true; - break; - } - } - if(foundOp == false){ - //wrap document in a $set. - Document s = new Document().Append("$set", doc); - doc = s; - } - this.Update(doc, selector, UpdateFlags.MultiUpdate); - } - - - public void UpdateAll (Document doc, Document selector, bool safemode){ - if(safemode)this.Db.ResetError(); - this.UpdateAll(doc, selector); - CheckPreviousError(safemode); - } - - - private void CheckError(bool safemode){ - if(safemode){ - Document err = this.Db.GetLastError(); - if(ErrorTranslator.IsError(err)) throw ErrorTranslator.Translate(err); - } - } - private void CheckPreviousError(bool safemode){ - if(safemode){ - Document err = this.Db.GetPreviousError(); - if(ErrorTranslator.IsError(err)) throw ErrorTranslator.Translate(err); - } - } - } -} \ No newline at end of file diff --git a/MongoDBDriver/CollectionMetaData.cs b/MongoDBDriver/CollectionMetaData.cs deleted file mode 100644 index bd72c5c5..00000000 --- a/MongoDBDriver/CollectionMetaData.cs +++ /dev/null @@ -1,107 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Text; -using MongoDB.Driver.Connections; - -namespace MongoDB.Driver -{ - /// - /// Lazily loaded meta data on the collection. - /// - public class CollectionMetaData - { - private string fullName; - private string name; - private Database db; - - public CollectionMetaData (string dbName, string name, Connection conn){ - this.fullName = dbName + "." + name; - this.name = name; - this.db = new Database (conn, dbName); - } - - private Document options = null; - public Document Options { - get { - if (options != null) - return options; - Document doc = db["system.namespaces"].FindOne (new Document ().Append ("name", this.fullName)); - if (doc == null) - doc = new Document (); - if (doc.Contains ("create")) - doc.Remove ("create"); - //Not sure why this is here. The python driver has it. 
- this.options = doc; - return this.options; - } - } - - private bool gotIndexes = false; - private Dictionary indexes = new Dictionary (); - public Dictionary Indexes { - get { - if (gotIndexes) - return indexes; - - indexes.Clear (); - - ICursor docs = db["system.indexes"].Find (new Document ().Append ("ns", this.fullName)); - foreach (Document doc in docs.Documents) { - indexes.Add ((string)doc["name"], doc); - } - - return indexes; - } - } - - public void CreateIndex (string name, Document fieldsAndDirections, bool unique){ - Document index = new Document (); - index["name"] = name; - index["ns"] = this.fullName; - index["key"] = fieldsAndDirections; - index["unique"] = unique; - db["system.indexes"].Insert (index); - this.refresh (); - } - - public void CreateIndex (Document fieldsAndDirections, bool unique){ - string name = this.generateIndexName (fieldsAndDirections, unique); - this.CreateIndex (name, fieldsAndDirections, unique); - } - - public void DropIndex (string name){ - Document cmd = new Document (); - cmd.Append ("deleteIndexes", this.name).Append ("index", name); - db.SendCommand (cmd); - this.refresh (); - } - - public void Rename (string newName){ - if (string.IsNullOrEmpty (newName)) - throw new ArgumentException ("Name must not be null or empty", "newName"); - - Document cmd = new Document (); - cmd.Append ("renameCollection", fullName).Append ("to", db.Name + "." + newName); - db.GetSisterDatabase ("admin").SendCommand (cmd); - this.refresh (); - } - - public void refresh (){ - indexes.Clear (); - gotIndexes = false; - options = null; - } - - protected string generateIndexName (Document fieldsAndDirections, bool unique){ - StringBuilder sb = new StringBuilder ("_"); - foreach (string key in fieldsAndDirections.Keys) { - sb.Append (key).Append ("_"); - } - if (unique) - sb.Append ("unique_"); - - return sb.ToString (); - } - - } -} diff --git a/MongoDBDriver/Configuration/ConnectionCollection.cs b/MongoDBDriver/Configuration/ConnectionCollection.cs deleted file mode 100644 index da8419cd..00000000 --- a/MongoDBDriver/Configuration/ConnectionCollection.cs +++ /dev/null @@ -1,70 +0,0 @@ -using System; -using System.Configuration; - -namespace MongoDB.Driver.Configuration -{ - public class ConnectionCollection : ConfigurationElementCollection - { - - public override ConfigurationElementCollectionType CollectionType { - get { return ConfigurationElementCollectionType.AddRemoveClearMap; } - } - - protected override ConfigurationElement CreateNewElement () - { - return new ConnectionElement (); - } - - protected override Object GetElementKey (ConfigurationElement element) - { - return ((ConnectionElement)element).Name; - } - - public ConnectionElement this[int index] { - get { return (ConnectionElement)BaseGet (index); } - set { - if (BaseGet (index) != null) { - BaseRemoveAt (index); - } - BaseAdd (index, value); - } - } - public new ConnectionElement this[string Name] { - get { return (ConnectionElement)BaseGet (Name); } - } - - public int IndexOf (ConnectionElement conn) - { - return BaseIndexOf (conn); - } - public void Add (ConnectionElement conn) - { - BaseAdd (conn); - } - protected override void BaseAdd (ConfigurationElement element) - { - BaseAdd (element, false); - } - - public void Remove (ConnectionElement conn) - { - if (BaseIndexOf (conn) >= 0) - BaseRemove (conn.Name); - } - - public void RemoveAt (int index) - { - BaseRemoveAt (index); - } - - public void Remove (string name) - { - BaseRemove (name); - } - - public void Clear () - { - BaseClear (); - } - } -} 
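// A hedged sketch of the index management surface above (CollectionMetaData.CreateIndex
// and DropIndex, reached through a collection's MetaData property). Field names and the
// database/collection names are illustrative assumptions.
using MongoDB.Driver;

public static class IndexSketch
{
    public static void Run()
    {
        Database db = new Database("Server=localhost:27017", "exampledb");
        IMongoCollection posts = db["posts"];

        // Ascending index on "title"; the second argument marks it unique.
        posts.MetaData.CreateIndex(new Document().Append("title", 1), false);

        // Explicitly named compound index.
        Document key = new Document().Append("author", 1).Append("posted", -1);
        posts.MetaData.CreateIndex("by_author_posted", key, false);

        posts.MetaData.DropIndex("by_author_posted");
    }
}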
diff --git a/MongoDBDriver/Configuration/ConnectionElement.cs b/MongoDBDriver/Configuration/ConnectionElement.cs deleted file mode 100755 index 6821ec18..00000000 --- a/MongoDBDriver/Configuration/ConnectionElement.cs +++ /dev/null @@ -1,21 +0,0 @@ -using System; -using System.Configuration; - -namespace MongoDB.Driver.Configuration -{ - - public class ConnectionElement : ConfigurationElement - { - [ConfigurationProperty("key", IsRequired = true)] - public string Name{ - get{return (String)this["key"];} - set{this["key"] = value;} - } - - [ConfigurationProperty("connectionString", DefaultValue = "Server=localhost:27017")] - public string ConnectionString{ - get { return (String)this["connectionString"]; } - set { this["connectionString"] = value; } - } - } -} diff --git a/MongoDBDriver/Configuration/MongoConfiguration.cs b/MongoDBDriver/Configuration/MongoConfiguration.cs deleted file mode 100644 index ec60afad..00000000 --- a/MongoDBDriver/Configuration/MongoConfiguration.cs +++ /dev/null @@ -1,20 +0,0 @@ -using System; -using System.Configuration; - -namespace MongoDB.Driver.Configuration -{ - public class MongoConfiguration : ConfigurationSection - { - - public MongoConfiguration() { } - - [ConfigurationProperty("connections", IsDefaultCollection = false)] - [ConfigurationCollection(typeof(ConnectionCollection), - AddItemName = "add", - ClearItemsName = "clear", - RemoveItemName = "remove")] - public ConnectionCollection Connections{ - get{return (ConnectionCollection)this["connections"];} - } - } -} diff --git a/MongoDBDriver/Connections/Connection.cs b/MongoDBDriver/Connections/Connection.cs deleted file mode 100644 index da2a93a6..00000000 --- a/MongoDBDriver/Connections/Connection.cs +++ /dev/null @@ -1,172 +0,0 @@ -using System; -using System.IO; -using MongoDB.Driver.Protocol; - -namespace MongoDB.Driver.Connections -{ - /// - /// Connection is a managment unit which uses a RawConnection from connection pool - /// to comunicate with the server. - /// - /// If an connection error occure, the RawConnection is transparently replaced - /// by a new fresh connection. - /// - /// - public class Connection : IDisposable - { - private readonly IConnectionFactory _factory; - private RawConnection _connection; - - /// - /// Initializes a new instance of the class. - /// - /// The pool. - public Connection(IConnectionFactory factory) - { - if (factory == null) - throw new ArgumentNullException ("factory"); - - _factory = factory; - } - - /// - /// Releases unmanaged resources and performs other cleanup operations before the - /// is reclaimed by garbage collection. - /// - ~Connection (){ - // make sure the connection returns to pool if the user forget it. - Dispose (); - } - - /// - /// Gets or sets a value indicating whether this instance is authenticated. - /// - /// - /// true if this instance is authenticated; otherwise, false. - /// - public bool IsAuthenticated { - get { return _connection.IsAuthenticated; } - } - - /// - /// Masks as authenticated. - /// - public void MaskAuthenticated (){ - _connection.MarkAuthenticated (); - } - - /// - /// Gets the connection string. - /// - /// The connection string. - public string ConnectionString { - get { return _factory.ConnectionString; } - } - - /// - /// Used for sending a message that gets a reply such as a query. - /// - /// - /// - /// A reconnect will be issued but it is up to the caller to handle the error. 
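// A hedged sketch of consuming the configuration section above. The section name "mongo"
// and the connection key "default" are assumptions made for illustration; the actual
// app.config registration is not part of this hunk.
using System.Configuration;
using MongoDB.Driver;
using MongoDB.Driver.Configuration;

public static class ConfigSketch
{
    public static void Run()
    {
        MongoConfiguration config =
            (MongoConfiguration)ConfigurationManager.GetSection("mongo");

        // Each <add key="..." connectionString="..."/> entry surfaces as a ConnectionElement.
        ConnectionElement conn = config.Connections["default"];
        Database db = new Database(conn.ConnectionString, "exampledb");
    }
}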
- public ReplyMessage SendTwoWayMessage (IRequestMessage msg){ - if (this.State != ConnectionState.Opened) { - throw new MongoCommException ("Operation cannot be performed on a closed connection.", this); - } - try { - ReplyMessage reply = new ReplyMessage (); - lock (_connection) { - msg.Write (_connection.GetStream ()); - reply.Read (_connection.GetStream ()); - } - return reply; - } catch (IOException) { - ReplaceInvalidConnection (); - throw; - } - - } - - /// - /// Used for sending a message that gets no reply such as insert or update. - /// - /// - /// - /// A reconnect will be issued but it is up to the caller to handle the error. - public void SendMessage (IRequestMessage msg){ - if (this.State != ConnectionState.Opened) { - throw new MongoCommException ("Operation cannot be performed on a closed connection.", this); - } - try { - lock (_connection) { - msg.Write (_connection.GetStream ()); - } - } catch (IOException) { - //Sending doesn't seem to always trigger the detection of a closed socket. - ReplaceInvalidConnection (); - throw; - } - } - - /// - /// Gets the state. - /// - /// The state. - public ConnectionState State { - get { return _connection != null ? ConnectionState.Opened : ConnectionState.Closed; } - } - - /// - /// Just sends a simple message string to the database. - /// - /// - /// A - /// - public void SendMsgMessage (String message){ - MsgMessage msg = new MsgMessage (); - msg.Message = message; - this.SendMessage (msg); - } - - /// - /// Opens this instance. - /// - public void Open (){ - _connection = _factory.Open(); - } - - /// - /// Closes this instance. - /// - public void Close (){ - if (_connection == null) - return; - - _factory.Close(_connection); - _connection = null; - } - - /// - /// Replaces the invalid connection. - /// - private void ReplaceInvalidConnection (){ - if (_connection == null) - return; - - _connection.MarkAsInvalid (); - _factory.Close (_connection); - _connection = _factory.Open(); - } - - public Stream GetStream (){ - return _connection.GetStream (); - } - - /// - /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. 
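// A hedged sketch of the connection lifecycle above. The connection is obtained the same
// way the Database constructor does (ConnectionFactory.GetConnection); whether the factory
// hands back an already-opened connection is not shown in this hunk, so Open() is called
// defensively. A failed send replaces the pooled socket and rethrows the IOException.
using MongoDB.Driver.Connections;

public static class ConnectionSketch
{
    public static void Run()
    {
        Connection conn = ConnectionFactory.GetConnection("Server=localhost:27017");
        conn.Open();
        try
        {
            conn.SendMsgMessage("driver says hello");   // one-way message, no reply expected
        }
        finally
        {
            conn.Dispose();   // Close() runs here, returning the RawConnection to its factory
        }
    }
}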
- /// - public void Dispose (){ - Close (); - } - } -} diff --git a/MongoDBDriver/Connections/ConnectionState.cs b/MongoDBDriver/Connections/ConnectionState.cs deleted file mode 100644 index 0eddb664..00000000 --- a/MongoDBDriver/Connections/ConnectionState.cs +++ /dev/null @@ -1,7 +0,0 @@ -namespace MongoDB.Driver.Connections -{ - public enum ConnectionState{ - Closed = 0, - Opened = 1, - } -} \ No newline at end of file diff --git a/MongoDBDriver/Cursor.cs b/MongoDBDriver/Cursor.cs deleted file mode 100644 index dab68b79..00000000 --- a/MongoDBDriver/Cursor.cs +++ /dev/null @@ -1,219 +0,0 @@ -using System; -using System.Collections.Generic; -using System.IO; -using MongoDB.Driver.Connections; -using MongoDB.Driver.Protocol; - -namespace MongoDB.Driver -{ - public class Cursor : ICursor { - private Connection connection; - - private long id = -1; - public long Id{ - get {return id;} - } - - private String fullCollectionName; - public string FullCollectionName { - get {return fullCollectionName;} - } - - private Document spec; - public ICursor Spec (Document spec){ - TryModify(); - this.spec = spec; - return this; - } - - private int limit; - public ICursor Limit (int limit){ - TryModify(); - this.limit = limit; - return this; - } - - private int skip; - public ICursor Skip (int skip){ - TryModify(); - this.skip = skip; - return this; - } - - private Document fields; - public ICursor Fields (Document fields){ - TryModify(); - this.fields = fields; - return this; - } - - private QueryOptions options; - public ICursor Options(QueryOptions options){ - TryModify(); - this.options = options; - return this; - } - - #region "Spec Options" - private Document specOpts = new Document(); - - public ICursor Sort(string field){ - return this.Sort(field, IndexOrder.Ascending); - } - - public ICursor Sort(string field, IndexOrder order){ - return this.Sort(new Document().Append(field, order)); - } - - public ICursor Sort(Document fields){ - TryModify(); - AddOrRemoveSpecOpt("$orderby", fields); - return this; - } - - public ICursor Hint(Document index){ - TryModify(); - AddOrRemoveSpecOpt("$hint", index); - return this; - } - - public ICursor Snapshot(Document index){ - TryModify(); - AddOrRemoveSpecOpt("$snapshot", index); - return this; - } - - public Document Explain(){ - TryModify(); - specOpts["$explain"] = true; - - IEnumerable docs = this.Documents; - using((IDisposable)docs){ - foreach(Document doc in docs){ - return doc; - } - } - throw new InvalidOperationException("Explain failed."); - } - - #endregion - - private bool modifiable = true; - public bool Modifiable{ - get {return modifiable;} - } - - private ReplyMessage reply; - - public Cursor(Connection conn, string fullCollectionName){ - this.connection = conn; - this.fullCollectionName = fullCollectionName; - } - - public Cursor(Connection conn, String fullCollectionName, Document spec, int limit, int skip, Document fields): - this(conn,fullCollectionName){ - if(spec == null)spec = new Document(); - this.spec = spec; - this.limit = limit; - this.skip = skip; - this.fields = fields; - } - - public IEnumerable Documents{ - get{ - if(this.reply == null){ - RetrieveData(); - } - int docsReturned = 0; - Document[] docs = this.reply.Documents; - Boolean shouldBreak = false; - while(!shouldBreak){ - foreach(Document doc in docs){ - if((this.limit == 0) || (this.limit != 0 && docsReturned < this.limit)){ - docsReturned++; - yield return doc; - }else{ - shouldBreak = true; - yield break; - } - } - if(this.Id != 0 && shouldBreak == false){ - 
RetrieveMoreData(); - docs = this.reply.Documents; - if(docs == null){ - shouldBreak = true; - } - }else{ - shouldBreak = true; - } - } - } - } - - private void RetrieveData(){ - QueryMessage query = new QueryMessage(); - query.FullCollectionName = this.FullCollectionName; - query.Query = BuildSpec(); - query.NumberToReturn = this.limit; - query.NumberToSkip = this.skip; - query.Options = options; - - if(this.fields != null){ - query.ReturnFieldSelector = this.fields; - } - try{ - this.reply = connection.SendTwoWayMessage(query); - this.id = this.reply.CursorID; - if(this.limit < 0)this.limit = this.limit * -1; - }catch(IOException ioe){ - throw new MongoCommException("Could not read data, communication failure", this.connection,ioe); - } - - } - - private void RetrieveMoreData(){ - GetMoreMessage gmm = new GetMoreMessage(this.fullCollectionName, this.Id, this.limit); - try{ - this.reply = connection.SendTwoWayMessage(gmm); - this.id = this.reply.CursorID; - }catch(IOException ioe){ - this.id = 0; - throw new MongoCommException("Could not read data, communication failure", this.connection,ioe); - } - } - - - public void Dispose(){ - if(this.Id == 0) return; //All server side resources disposed of. - KillCursorsMessage kcm = new KillCursorsMessage(this.Id); - try{ - this.id = 0; - connection.SendMessage(kcm); - }catch(IOException ioe){ - throw new MongoCommException("Could not read data, communication failure", this.connection,ioe); - } - } - - private void TryModify(){ - if(this.modifiable) return; - throw new InvalidOperationException("Cannot modify a cursor that has already returned documents."); - } - - private void AddOrRemoveSpecOpt(string key, Document doc){ - if(doc == null){ - specOpts.Remove(key); - }else{ - specOpts[key] = doc; - } - } - - private Document BuildSpec(){ - if(this.specOpts.Count == 0) return this.spec; - Document doc = new Document(); - this.specOpts.CopyTo(doc); - doc["$query"] = this.spec; - return doc; - } - - } -} diff --git a/MongoDBDriver/DBRef.cs b/MongoDBDriver/DBRef.cs deleted file mode 100644 index 7f130d3b..00000000 --- a/MongoDBDriver/DBRef.cs +++ /dev/null @@ -1,117 +0,0 @@ -using System; - -namespace MongoDB.Driver -{ - /// - /// Native type that maps to a database reference. Use Database.FollowReference(DBRef) to retrieve the document - /// that it refers to. - /// - /// DBRefs are just a specification for a specially formatted Document. At this time the database - /// does no special handling of them. Any referential integrity must be maintained by the application - /// not the database. - /// - public class DBRef - { - public const string RefName = "$ref"; - public const string IdName = "$id"; - public const string MetaName = "metadata"; - - private Document document; - - /// - /// Initializes a new instance of the class. - /// - public DBRef(){ - document = new Document(); - } - - private string collectionName; - /// - /// The name of the collection the referenced document is in. - /// - public string CollectionName { - get { return collectionName; } - set { - collectionName = value; - document[RefName] = value; - } - } - - private object id; - /// - /// Object value of the id. It isn't an Oid because document ids are not required to be oids. - /// - public object Id { - get { return id; } - set { - id = value; - document[IdName] = value; - } - } - - private Document metadata; - /// - /// An extension to the spec that allows storing of arbitrary data about a reference. - /// - /// This is a non-standard feature. 
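// A hedged sketch of the fluent cursor above: modifiers such as Sort, Skip and Limit must
// be applied before Documents is first enumerated, after which the cursor refuses further
// modification. Collection and field names are illustrative assumptions.
using System;
using MongoDB.Driver;

public static class CursorSketch
{
    public static void Run()
    {
        Database db = new Database("Server=localhost:27017", "exampledb");
        IMongoCollection posts = db["posts"];

        using (ICursor cursor = posts.FindAll()
                                     .Sort("posted", IndexOrder.Descending)
                                     .Skip(20)
                                     .Limit(10))
        {
            foreach (Document doc in cursor.Documents)
                Console.WriteLine(doc["title"]);
        }   // Dispose sends KillCursors if the server still holds an open cursor.
    }
}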
- /// - public Document MetaData { - get{return metadata; } - set{ - metadata = value; - document[MetaName] = value; - } - } - - /// - /// Constructs a DBRef from a document that matches the DBref specification. - /// - public DBRef(Document document){ - if(IsDocumentDBRef(document) == false) throw new ArgumentException("Document is not a valid DBRef"); - collectionName = (String)document[RefName]; - id = document[IdName]; - this.document = document; - if(document.Contains("metadata")) this.MetaData = (Document)document["metadata"]; - } - - public DBRef(string collectionName, object id){ - document = new Document(); - this.CollectionName = collectionName; - this.Id = id; - } - - public override bool Equals(object obj){ - if(obj is DBRef){ - DBRef comp = (DBRef)obj; - return comp.Id.Equals(this.Id) && comp.CollectionName.Equals(this.CollectionName); - } - return base.Equals(obj); - } - - public override int GetHashCode(){ - unchecked{ - return ((this.collectionName != null ? this.collectionName.GetHashCode() : 0) * 397) ^ (this.id != null ? this.id.GetHashCode() : 0); - } - } - - public override string ToString (){ - return document.ToString(); - } - - - /// - /// Deprecated. Use the new DBRef(Document) constructor instead. - /// - public static DBRef FromDocument(Document doc){ - return new DBRef(doc);; - } - - public static bool IsDocumentDBRef(Document doc){ - return doc != null && doc.Contains(RefName) && doc.Contains(IdName); - } - - public static explicit operator Document(DBRef d){ - return d.document; - } - } -} diff --git a/MongoDBDriver/Database.cs b/MongoDBDriver/Database.cs deleted file mode 100644 index 14a424b8..00000000 --- a/MongoDBDriver/Database.cs +++ /dev/null @@ -1,207 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Security.Cryptography; -using System.Text; -using MongoDB.Driver.Connections; - -namespace MongoDB.Driver -{ - public class Database - { - private Connection connection; - private IMongoCollection command; - - public Database(string connectionString, String name){ - this.connection = ConnectionFactory.GetConnection(connectionString); - this.Name = name; - this.command = this["$cmd"]; - } - - public Database(Connection conn, String name){ - this.connection = conn; - this.Name = name; - this.command = this["$cmd"]; - } - - public string Name { get; private set; } - - private DatabaseMetaData metaData; - public DatabaseMetaData MetaData { - get { return metaData ?? (metaData = new DatabaseMetaData(this.Name, this.connection)); } - } - - private DatabaseJS js; - public DatabaseJS JS { - get { return js ?? (js = new DatabaseJS(this)); } - } - - public List GetCollectionNames(){ - IMongoCollection namespaces = this["system.namespaces"]; - ICursor cursor = namespaces.Find(new Document()); - List names = new List(); - foreach (Document doc in cursor.Documents){ - names.Add((String)doc["name"]); //Fix Me: Should filter built-ins - } - return names; - } - - public IMongoCollection this[ String name ] { - get{ - return this.GetCollection(name); - } - } - - public IMongoCollection GetCollection(String name){ - IMongoCollection col = new Collection(name, this.connection, this.Name); - return col; - } - - /// - /// Gets the document that a reference is pointing to. 
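// A hedged sketch of storing and following a DBRef with the types shown above; collection
// names, field names and the _id value are illustrative assumptions.
using MongoDB.Driver;

public static class DBRefSketch
{
    public static void Run()
    {
        Database db = new Database("Server=localhost:27017", "exampledb");

        Document author = new Document().Append("_id", "ab1").Append("name", "Sample Author");
        db["authors"].Insert(author);

        // Store a reference to the author inside a post...
        DBRef authorRef = new DBRef("authors", author["_id"]);
        db["posts"].Insert(new Document().Append("title", "hello").Append("author", authorRef));

        // ...and resolve it later; FollowReference issues a FindOne on the referenced collection.
        Document found = db.FollowReference(authorRef);
    }
}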
- /// - public Document FollowReference(DBRef reference){ - if(reference == null) - throw new ArgumentNullException("reference", "cannot be null"); - Document query = new Document().Append("_id", reference.Id); - return this[reference.CollectionName].FindOne(query); - } - - /// - /// Most operations do not have a return code in order to save the client from having to wait for results. - /// GetLastError can be called to retrieve the return code if clients want one. - /// - public Document GetLastError(){ - return SendCommand("getlasterror"); - } - - /// - /// Retrieves the last error and forces the database to fsync all files before returning. - /// - /// Server version 1.3+ - public Document GetLastErrorAndFSync(){ - return SendCommand(new Document {{"getlasterror", 1.0},{"fsync", true}}); - } - - /// - /// Call after sending a bulk operation to the database. - /// - public Document GetPreviousError(){ - return SendCommand("getpreverror"); - } - - /// - /// Gets the sister database on the same Mongo connection with the given name. - /// - public Database GetSisterDatabase(string sisterDbName){ - return new Database(connection, sisterDbName); - } - - /// - /// Resets last error. This is good to call before a bulk operation. - /// - public void ResetError(){ - SendCommand("reseterror"); - } - - public Document Eval(string javascript){ - return Eval(javascript, new Document()); - } - - public Document Eval(string javascript, Document scope){ - return Eval(new CodeWScope(javascript, scope)); - } - - public Document Eval(CodeWScope cw){ - Document cmd = new Document().Append("$eval", cw); - return SendCommand(cmd); - } - - public Document SendCommand(string command){ - AuthenticateIfRequired(); - return SendCommandCore(command); - } - - public Document SendCommand(Document cmd) - { - AuthenticateIfRequired(); - return SendCommandCore(cmd); - } - - private Document SendCommandCore(string command) - { - var cmd = new Document().Append(command,1.0); - return SendCommandCore(cmd); - } - - private Document SendCommandCore(Document cmd) - { - Document result = this.command.FindOne(cmd); - double ok = (double)result["ok"]; - if(ok != 1.0) - { - var msg = string.Empty; - if(result.Contains("msg")) - { - msg = (string)result["msg"]; - } - else if(result.Contains("errmsg")) - { - msg = (string)result["errmsg"]; - } - throw new MongoCommandException(msg, result, cmd); - } - return result; - } - - /// - /// Authenticates the on first request. - /// - private void AuthenticateIfRequired() - { - if(connection.IsAuthenticated) - return; - - var builder = new MongoConnectionStringBuilder(connection.ConnectionString); - - if(string.IsNullOrEmpty(builder.Username)) - return; - - var nonceResult = SendCommandCore("getnonce"); - var nonce = (String)nonceResult["nonce"]; - - if(nonce == null) - throw new MongoException("Error retrieving nonce", null); - - var pwd = Hash(builder.Username + ":mongo:" + builder.Password); - var auth = new Document - { - {"authenticate", 1.0}, - {"user", builder.Username}, - {"nonce", nonce}, - {"key", Hash(nonce + builder.Username + pwd)} - }; - try - { - SendCommandCore(auth); - } - catch(MongoCommandException exception) - { - //Todo: use custom exception? - throw new MongoException("Authentication faild for " + builder.Username, exception); - } - - connection.MaskAuthenticated(); - } - - /// - /// Hashes the specified text. - /// - /// The text. 
- /// - internal static string Hash(string text){ - MD5 md5 = MD5.Create(); - byte[] hash = md5.ComputeHash(Encoding.Default.GetBytes(text)); - return BitConverter.ToString(hash).Replace("-","").ToLower(); - } - } -} \ No newline at end of file diff --git a/MongoDBDriver/DatabaseJS.cs b/MongoDBDriver/DatabaseJS.cs deleted file mode 100644 index 540a02a7..00000000 --- a/MongoDBDriver/DatabaseJS.cs +++ /dev/null @@ -1,165 +0,0 @@ -using System; -using System.Collections.Generic; - -namespace MongoDB.Driver{ - - /// - /// Encapsulates and provides access to the serverside javascript stored in db.system.js. - /// - public class DatabaseJS : ICollection - { - //private Connection connection; - private Database db; - private IMongoCollection js; - - internal DatabaseJS (Database db){ - this.db = db; - this.js = db["system.js"]; - //Needed for some versions of the db to retrieve the functions. - js.MetaData.CreateIndex(new Document().Append("_id",1),true); - } - - public Document this[ String name ] { - get{ - return GetFunction(name); - } - set{ - Add(value); - } - } - - /// - /// Gets the document representing the function in the database. - /// - /// - /// A - /// - /// - /// A - /// - public Document GetFunction(string name){ - return js.FindOne(new Document().Append("_id", name)); - } - - /// - /// Returns a listing of the names of all the functions in the database - /// - public List GetFunctionNames(){ - List l = new List(); - foreach(Document d in js.FindAll().Documents){ - l.Add((String)d["_id"]); - } - return l; - } - - public void Add (string name, string func){ - Add(name, new Code(func)); - } - - public void Add (string name, Code func){ - Add(new Document().Append("_id", name).Append("value", func)); - } - - /// - /// Store a function in the database with an extended attribute called version. - /// - /// Version attributes are an extension to the spec. Function names must be unique - /// to the database so only one version can be stored at a time. This is most useful for libraries - /// that store function in the database to make sure that the function they are using is the most - /// up to date. - /// - public void Add(string name, Code func, float version){ - Add(new Document().Append("_id", name).Append("value", func).Append("version",version)); - } - - /// - /// Stores a function in the database. - /// - public void Add (Document item){ - if(js.FindOne(new Document().Append("_id", item["_id"])) != null){ - throw new ArgumentException(String.Format("Function {0} already exists in the database.", item["_id"])); - } - js.Insert(item); - } - - /// - /// Removes every function in the database. - /// - public void Clear (){ - js.Delete(new Document()); - } - - public bool Contains (Document item){ - return Contains((string)item["_id"]); - } - - /// - /// Checks to see if a function named name is stored in the database. - /// - /// - /// A - /// - /// - /// A - /// - public bool Contains (string name){ - return GetFunction(name) != null; - } - - /// - /// Copies the functions from the database ordered by _id (name) to the array starting at the index. 
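// A hedged sketch of the server-side JavaScript helpers above: functions are stored in
// system.js through Database.JS and can then be used from Eval. Function and database
// names are illustrative; the "retval" field is the usual shape of the $eval reply,
// not something defined in this hunk.
using MongoDB.Driver;

public static class ServerJsSketch
{
    public static void Run()
    {
        Database db = new Database("Server=localhost:27017", "exampledb");

        // Add throws if a function with the same name already exists, so guard with Contains.
        if (!db.JS.Contains("addNumbers"))
            db.JS.Add("addNumbers", new Code("function(x, y) { return x + y; }"));

        // Run ad-hoc JavaScript on the server; the reply document typically carries "retval".
        Document result = db.Eval("function() { return addNumbers(2, 3); }");
    }
}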
- /// - /// - /// A array to coppy to - /// - /// - /// A - /// - public void CopyTo (Document[] array, int arrayIndex){ - Document query = new Document().Append("$orderby", new Document().Append("_id", 1)); - int idx = arrayIndex; - foreach(Document doc in js.Find(query,array.Length - 1,arrayIndex).Documents){ - array[idx] = doc; - idx++; - } - } - - public void Update(Document item){ - throw new System.NotImplementedException(); - } - - public bool Remove (Document item){ - return Remove((string)item["_id"]); - } - - public bool Remove (string name){ - js.Delete(new Document().Append("_id", name)); - return true; - } - - public int Count { - get { - long cnt = js.Count(); - if(cnt > int.MaxValue) return int.MaxValue; //lots of functions. - return (int)cnt; - } - } - - public bool IsReadOnly { - get { - return false; - } - } - - public IEnumerator GetEnumerator (){ - foreach(Document doc in js.FindAll().Documents){ - yield return doc; - } - yield break; - } - - System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator (){ - return GetEnumerator(); - } - } -} diff --git a/MongoDBDriver/DatabaseMetaData.cs b/MongoDBDriver/DatabaseMetaData.cs deleted file mode 100644 index d757c374..00000000 --- a/MongoDBDriver/DatabaseMetaData.cs +++ /dev/null @@ -1,76 +0,0 @@ -using System; -using MongoDB.Driver.Connections; - -namespace MongoDB.Driver -{ - /// - /// Administration of metadata for a database. - /// - public class DatabaseMetaData - { - private Connection connection; - private string name; - private Database db; - - public DatabaseMetaData(string name, Connection conn){ - this.connection = conn; - this.name = name; - this.db = new Database(conn, name); - } - - public Collection CreateCollection(String name){ - return this.CreateCollection(name,null); - } - - public Collection CreateCollection(String name, Document options){ - Document cmd = new Document(); - cmd.Append("create", name).Update(options); - db.SendCommand(cmd); - return new Collection(name, connection, this.name); - } - - - public Boolean DropCollection(Collection col){ - return this.DropCollection(col.Name); - } - - public Boolean DropCollection(String name){ - Document result = db.SendCommand(new Document().Append("drop",name)); - return result.Contains("ok") && ((double)result["ok"] == 1); - } - - public Boolean DropDatabase(){ - Document result = db.SendCommand("dropDatabase"); - return result.Contains("ok") && ((double)result["ok"] == 1); - } - - public void AddUser(string username, string password){ - IMongoCollection users = db["system.users"]; - string pwd = Database.Hash(username + ":mongo:" + password); - Document user = new Document().Append("user", username).Append("pwd", pwd); - - if (FindUser(username) != null){ - throw new MongoException("A user with the name " + username + " already exists in this database.", null); - } - users.Insert(user); - } - - public void RemoveUser(string username){ - IMongoCollection users = db["system.users"]; - users.Delete(new Document().Append("user", username)); - } - - public ICursor ListUsers(){ - IMongoCollection users = db["system.users"]; - return users.FindAll(); - } - - public Document FindUser(string username){ - return FindUser(new Document().Append("user",username)); - } - - public Document FindUser(Document spec){ - return db["system.users"].FindOne(spec); - } - } -} diff --git a/MongoDBDriver/Document.cs b/MongoDBDriver/Document.cs deleted file mode 100644 index 881aca6e..00000000 --- a/MongoDBDriver/Document.cs +++ /dev/null @@ -1,154 +0,0 @@ -using 
System; -using System.Collections; -using System.Collections.Generic; - -namespace MongoDB.Driver -{ - /// - /// Description of Document. - /// - public class Document : DictionaryBase - { - private List orderedKeys = new List (); - - public Object this[String key] { - get { return Dictionary[key]; } - set { - if (orderedKeys.Contains (key) == false) { - orderedKeys.Add (key); - } - Dictionary[key] = value; - } - } - - public ICollection Keys { - get { return (orderedKeys); } - } - - public ICollection Values { - get { return (Dictionary.Values); } - } - - public void Add (String key, Object value) - { - Dictionary.Add (key, value); - //Relies on ArgumentException from above if key already exists. - orderedKeys.Add (key); - } - - public Document Append (String key, Object value) - { - this.Add (key, value); - return this; - } - - /// - /// Adds an item to the Document at the specified position - /// - public void Insert (String key, Object value, int Position) - { - Dictionary.Add (key, value); - //Relies on ArgumentException from above if key already exists. - orderedKeys.Insert (Position, key); - } - public Document Prepend (String key, Object value) - { - this.Insert (key, value, 0); - return this; - } - - public Document Update (Document @from) - { - if (@from == null) - return this; - foreach (String key in @from.Keys) { - this[key] = @from[key]; - } - return this; - } - - public bool Contains (String key) - { - return (orderedKeys.Contains (key)); - } - - public void Remove (String key) - { - Dictionary.Remove (key); - orderedKeys.Remove (key); - } - - public new void Clear () - { - Dictionary.Clear (); - orderedKeys.Clear (); - } - - /// - /// TODO Fix any accidental reordering issues. - /// - /// - public void CopyTo (Document dest) - { - foreach (String key in orderedKeys) { - if (dest.Contains (key)) - dest.Remove (key); - dest[key] = this[key]; - } - } - - public override bool Equals (object obj) - { - if (obj is Document) { - return Equals (obj as Document); - } - return base.Equals (obj); - } - - public bool Equals (Document obj) - { - if (obj == null) - return false; - if (orderedKeys.Count != obj.orderedKeys.Count) - return false; - return this.GetHashCode () == obj.GetHashCode (); - } - - public override int GetHashCode (){ - int hash = 27; - foreach (var key in orderedKeys) { - var valueHashCode = GetValueHashCode (this[key]); - unchecked { - hash = (13 * hash) + key.GetHashCode (); - hash = (13 * hash) + valueHashCode; - } - } - return hash; - } - - private int GetValueHashCode (object value) - { - if (value == null) { - return 0; - } - return (value is Array) ? GetArrayHashcode ((Array)value) : value.GetHashCode (); - } - - private int GetArrayHashcode (Array array) - { - var hash = 0; - foreach (var value in array) { - var valueHashCode = GetValueHashCode (value); - unchecked { - hash = (13 * hash) + valueHashCode; - } - } - return hash; - } - - public override string ToString () - { - return JsonFormatter.Serialize (this); - } - } -} diff --git a/MongoDBDriver/Exceptions/MongoCommException.cs b/MongoDBDriver/Exceptions/MongoCommException.cs deleted file mode 100644 index cdc7260c..00000000 --- a/MongoDBDriver/Exceptions/MongoCommException.cs +++ /dev/null @@ -1,34 +0,0 @@ -using System; -using MongoDB.Driver.Connections; - -namespace MongoDB.Driver -{ - /// - /// - /// - public class MongoCommException : MongoException - { - /// - /// Gets or sets the connection string. - /// - /// The connection string. 
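// A hedged sketch of building Documents with the ordered, fluent API shown above; key
// order is preserved, which matters for commands and index specifications. Field values
// are illustrative assumptions.
using System;
using MongoDB.Driver;

public static class DocumentSketch
{
    public static void Run()
    {
        Document doc = new Document()
            .Append("title", "hello")
            .Append("views", 1);

        doc.Prepend("_id", Oid.NewOid());   // _id moves to the front, mirroring what Insert does

        if (doc.Contains("views"))
            doc["views"] = 2;               // indexer adds or replaces, keeping key order

        Console.WriteLine(doc);             // JSON-style output via the ToString override
    }
}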
- public string ConnectionString { get; private set; } - - /// - /// Initializes a new instance of the class. - /// - /// The message. - /// The connection. - public MongoCommException(string message, Connection connection):this(message,connection,null){} - - /// - /// Initializes a new instance of the class. - /// - /// The message. - /// The connection. - /// The inner. - public MongoCommException(string message, Connection connection, Exception inner):base(message,inner){ - ConnectionString = connection.ConnectionString; - } - } -} \ No newline at end of file diff --git a/MongoDBDriver/Exceptions/MongoCommandException.cs b/MongoDBDriver/Exceptions/MongoCommandException.cs deleted file mode 100644 index ec33535d..00000000 --- a/MongoDBDriver/Exceptions/MongoCommandException.cs +++ /dev/null @@ -1,44 +0,0 @@ -using System; - -namespace MongoDB.Driver -{ - /// - /// Raised when a command returns a failure message. - /// - public class MongoCommandException : MongoException - { - /// - /// Gets or sets the error. - /// - /// The error. - public Document Error { get; private set; } - - /// - /// Gets or sets the command. - /// - /// The command. - public Document Command { get; private set; } - - /// - /// Initializes a new instance of the class. - /// - /// The message. - /// The error. - /// The command. - public MongoCommandException(string message, Document error, Document command):base(message,null){ - this.Error = error; - this.Command = command; - } - /// - /// Initializes a new instance of the class. - /// - /// The message. - /// The error. - /// The command. - /// The e. - public MongoCommandException(string message, Document error, Document command, Exception e):base(message,e){ - this.Error = error; - this.Command = command; - } - } -} \ No newline at end of file diff --git a/MongoDBDriver/Exceptions/MongoDuplicateKeyException.cs b/MongoDBDriver/Exceptions/MongoDuplicateKeyException.cs deleted file mode 100644 index 072ff585..00000000 --- a/MongoDBDriver/Exceptions/MongoDuplicateKeyException.cs +++ /dev/null @@ -1,25 +0,0 @@ -using System; - -namespace MongoDB.Driver -{ - /// - /// Raised when an action causes a unique constraint violation in an index. - /// - public class MongoDuplicateKeyException : MongoOperationException - { - /// - /// Initializes a new instance of the class. - /// - /// The message. - /// The error. - public MongoDuplicateKeyException(string message, Document error):base(message, error,null){} - - /// - /// Initializes a new instance of the class. - /// - /// The message. - /// The error. - /// The e. - public MongoDuplicateKeyException(string message, Document error, Exception e):base(message, error,e){} - } -} \ No newline at end of file diff --git a/MongoDBDriver/Exceptions/MongoException.cs b/MongoDBDriver/Exceptions/MongoException.cs deleted file mode 100644 index 37b14a7f..00000000 --- a/MongoDBDriver/Exceptions/MongoException.cs +++ /dev/null @@ -1,23 +0,0 @@ -using System; - -namespace MongoDB.Driver -{ - /// - /// Base class for all Mongo Exceptions - /// - public class MongoException : Exception - { - /// - /// Initializes a new instance of the class. - /// - /// The message. - /// The inner. - public MongoException(string message, Exception inner):base(message,inner){} - - /// - /// Initializes a new instance of the class. - /// - /// The message. 
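// A hedged sketch of how the exception hierarchy above surfaces server errors when
// safemode is on. That a unique-index violation is translated specifically into
// MongoDuplicateKeyException is an assumption based on that type's summary; the
// translator itself is not part of this hunk. Names are illustrative.
using MongoDB.Driver;

public static class ErrorHandlingSketch
{
    public static void Run()
    {
        Database db = new Database("Server=localhost:27017", "exampledb");
        IMongoCollection users = db["users"];
        users.MetaData.CreateIndex(new Document().Append("email", 1), true);

        users.Insert(new Document().Append("email", "a@example.com"), true);
        try
        {
            users.Insert(new Document().Append("email", "a@example.com"), true);
        }
        catch (MongoDuplicateKeyException)
        {
            // The second insert violated the unique index; safemode turned the
            // getlasterror reply into a typed exception.
        }
    }
}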
- public MongoException(string message):base(message){} - } -} \ No newline at end of file diff --git a/MongoDBDriver/Exceptions/MongoMapReduceException.cs b/MongoDBDriver/Exceptions/MongoMapReduceException.cs deleted file mode 100644 index e327580c..00000000 --- a/MongoDBDriver/Exceptions/MongoMapReduceException.cs +++ /dev/null @@ -1,24 +0,0 @@ -namespace MongoDB.Driver -{ - /// - /// Raised when a map reduce call fails. - /// - public class MongoMapReduceException : MongoCommandException - { - /// - /// Gets or sets the map reduce result. - /// - /// The map reduce result. - public MapReduce.MapReduceResult MapReduceResult { get; private set; } - - /// - /// Initializes a new instance of the class. - /// - /// The exception. - /// The map reduce. - public MongoMapReduceException(MongoCommandException exception, MapReduce mapReduce) - :base(exception.Message,exception.Error, exception.Command) { - MapReduceResult = new MapReduce.MapReduceResult(exception.Error); - } - } -} diff --git a/MongoDBDriver/Exceptions/MongoOperationException.cs b/MongoDBDriver/Exceptions/MongoOperationException.cs deleted file mode 100644 index e8c36d6c..00000000 --- a/MongoDBDriver/Exceptions/MongoOperationException.cs +++ /dev/null @@ -1,30 +0,0 @@ -using System; - -namespace MongoDB.Driver -{ - public class MongoOperationException : MongoException - { - /// - /// Gets or sets the error. - /// - /// The error. - public Document Error { get; private set; } - - /// - /// Initializes a new instance of the class. - /// - /// The message. - /// The error. - public MongoOperationException(string message, Document error):this(message, error,null){} - - /// - /// Initializes a new instance of the class. - /// - /// The message. - /// The error. - /// The e. - public MongoOperationException(string message, Document error, Exception e):base(message,e){ - this.Error = error; - } - } -} \ No newline at end of file diff --git a/MongoDBDriver/ICursor.cs b/MongoDBDriver/ICursor.cs deleted file mode 100644 index cfd1fe36..00000000 --- a/MongoDBDriver/ICursor.cs +++ /dev/null @@ -1,21 +0,0 @@ -using System; -using System.Collections.Generic; - -namespace MongoDB.Driver { - public interface ICursor : IDisposable { - long Id { get; } - string FullCollectionName { get; } - ICursor Spec(Document spec); - ICursor Limit(int limit); - ICursor Skip(int skip); - ICursor Fields (Document fields); - ICursor Sort(string field); - ICursor Sort(string field, IndexOrder order); - ICursor Sort(Document fields); - ICursor Hint(Document index); - ICursor Snapshot(Document index); - Document Explain(); - bool Modifiable { get; } - IEnumerable Documents { get; } - } -} diff --git a/MongoDBDriver/IMongoCollection.cs b/MongoDBDriver/IMongoCollection.cs deleted file mode 100644 index f47a2c89..00000000 --- a/MongoDBDriver/IMongoCollection.cs +++ /dev/null @@ -1,39 +0,0 @@ -using System; -using System.Collections.Generic; - -namespace MongoDB.Driver -{ - public interface IMongoCollection - { - string Name { get; } - string DbName { get; } - string FullName { get; } - CollectionMetaData MetaData { get; } - Document FindOne (Document spec); - ICursor FindAll (); - ICursor Find (String @where); - ICursor Find (Document spec); - ICursor Find (Document spec, int limit, int skip); - ICursor Find (Document spec, int limit, int skip, Document fields); - MapReduce MapReduce (); - MapReduceBuilder MapReduceBuilder (); - long Count (); - long Count (Document spec); - void Insert (Document doc); - void Insert (Document doc, bool safemode); - void Insert 
(IEnumerable docs); - void Insert (IEnumerable docs, bool safemode); - void Delete (Document selector); - void Delete (Document selector, bool safemode); - void Update (Document doc); - void Update (Document doc, Document selector); - void Update (Document doc, Document selector, int upsert); - void Update (Document doc, Document selector, UpdateFlags flags); - void Update (Document doc, bool safemode); - void Update (Document doc, Document selector, bool safemode); - void Update (Document doc, Document selector, int upsert, bool safemode); - void Update (Document doc, Document selector, UpdateFlags flags, bool safemode); - void UpdateAll (Document doc, Document selector); - void UpdateAll (Document doc, Document selector, bool safemode); - } -} diff --git a/MongoDBDriver/IndexOrder.cs b/MongoDBDriver/IndexOrder.cs deleted file mode 100644 index 31ba930f..00000000 --- a/MongoDBDriver/IndexOrder.cs +++ /dev/null @@ -1,7 +0,0 @@ -namespace MongoDB.Driver -{ - public enum IndexOrder { - Descending = -1, - Ascending = 1 - } -} \ No newline at end of file diff --git a/MongoDBDriver/MapReduce.cs b/MongoDBDriver/MapReduce.cs deleted file mode 100644 index 9019ff95..00000000 --- a/MongoDBDriver/MapReduce.cs +++ /dev/null @@ -1,257 +0,0 @@ -using System; -using System.Collections.Generic; - -namespace MongoDB.Driver -{ - /// - /// A fluent interface for executing a Map/Reduce call against a collection. - /// - public class MapReduce : IDisposable - { - - public class MapReduceResult{ - Document result; - Document counts; - public MapReduceResult(Document result){ - this.result = result; - this.counts = (Document)result["counts"]; - } - public string CollectionName{ - get{return (string)result["result"];} - } - public long InputCount{ - get{return Convert.ToInt64(counts["input"]);} - } - - public long EmitCount{ - get{return Convert.ToInt64(counts["emit"]);} - } - - public long OutputCount{ - get{return Convert.ToInt64(counts["output"]);} - } - - public long Time{ - get{return Convert.ToInt64(result["timeMillis"]);} - } - - private TimeSpan span = TimeSpan.MinValue; - public TimeSpan TimeSpan{ - get{ - if(span == TimeSpan.MinValue) span = TimeSpan.FromMilliseconds(this.Time); - return span; - } - } - - public Boolean Ok{ - get{return (1.0 == Convert.ToDouble(result["ok"]));} - } - - public String ErrorMessage{ - get{ - if(result.Contains("msg"))return (String)result["msg"]; - return String.Empty; - } - } - public override string ToString (){ - return result.ToString(); - } - - } - - Database db; - Document cmd; - - #region "Properties" - string name; - public string Name { - get {return (String)cmd["mapreduce"];} - } - - MapReduceResult result; - /// - /// Holds the resulting value of the execution. - /// - public MapReduceResult Result { - get {return result;} - } - - internal MapReduce (Database db, string name){ - this.db = db; - this.cmd = new Document().Append("mapreduce", name); - this.Verbose = true; - } - - /// The map function references the variable this to inspect the current object under consideration. - /// A map function must call emit(key,value) at least once, but may be invoked any number of times, - /// as may be appropriate. - /// - public Code Map { - get {return (Code)cmd["map"];} - set { - TryModify(); - cmd["map"] = value;} - } - - /// - /// The reduce function receives a key and an array of values. To use, reduce the received values, - /// and return a result. 
- /// - /// The MapReduce engine may invoke reduce functions iteratively; thus, these functions - /// must be idempotent. If you need to perform an operation only once, use a finalize function. - public Code Reduce { - get {return (Code)cmd["reduce"];} - set { - TryModify(); - cmd["reduce"] = value; - } - } - - #region "Options" - - public Document Query{ - get{return (Document)cmd["query"];} - set{ - TryModify(); - cmd["query"] = value; - } - } - - /// - /// Sort the query. Useful for optimization - /// - public Document Sort { - get {return (Document)cmd["sort"];} - set { - TryModify(); - cmd["sort"] = value; - } - } - - /// - /// Number of objects to return from collection - /// - public long Limit { - get {return (long)cmd["limit"];} - set { - TryModify(); - cmd["limit"] = value; - } - } - - /// - /// Name of the final collection the results should be stored in. - /// - /// A temporary collection is still used and then renamed to the target name atomically. - /// - public string Out { - get {return (string)cmd["out"];} - set { - TryModify(); - cmd["out"] = value; - } - } - - /// - /// When true the generated collection is not treated as temporary. Specifying out automatically makes - /// the collection permanent - /// - public bool KeepTemp { - get {return Convert.ToBoolean(cmd["keeptemp"]);} - set { - TryModify(); - cmd["keeptemp"] = value; - } - } - - /// - /// Provides statistics on job execution time. - /// - public bool Verbose { - get {return (bool)cmd["verbose"];} - set { - TryModify(); - cmd["verbose"] = value; - } - } - - /// - /// Function to apply to all the results when finished. - /// - public Code Finalize { - get {return (Code)cmd["finalize"];} - set { - TryModify(); - cmd["finalize"] = value; - } - } - - /// - /// Document where fields go into javascript global scope - /// - public Document Scope { - get {return (Document)cmd["scope"];} - set { - TryModify(); - cmd["scope"] = value; - } - } - - #endregion - #endregion - - public MapReduce Execute(){ - if(cmd.Contains("map") == false || cmd.Contains("reduce") == false){ - throw new InvalidOperationException("Cannot execute without a map and reduce function"); - } - canModify = false; - try{ - result = new MapReduce.MapReduceResult(db.SendCommand(cmd)); - }catch(MongoCommandException mce){ - result = new MapReduce.MapReduceResult(mce.Error); - throw new MongoMapReduceException(mce, this); - } - return this; - } - - public IEnumerable Documents { - get { - if(result == null) Execute(); - if(result.Ok == false) - throw new InvalidOperationException("Documents cannot be iterated when an error was returned from execute."); - - IEnumerable docs = db[result.CollectionName].FindAll().Documents; - using((IDisposable)docs){ - foreach(Document doc in docs){ - yield return doc; - } - } - } - } - - bool canModify = true; - public Boolean CanModify{ - get{return canModify;} - } - - internal void TryModify(){ - if(canModify == false){ - throw new InvalidOperationException("Cannot modify a map/reduce that has already executed"); - } - } - - #region IDisposable implementation - private bool disposing = false; - public void Dispose (){ - if(KeepTemp == true || this.Out != null || disposing == true) return; - disposing = true; - - if(this.result == null || this.result.Ok == false) return; //Nothing to do. - - //Drop the temporary collection that was created as part of results. 
- db.MetaData.DropCollection(this.result.CollectionName); - } - - #endregion - } -} diff --git a/MongoDBDriver/MapReduceBuilder.cs b/MongoDBDriver/MapReduceBuilder.cs deleted file mode 100644 index c90eb629..00000000 --- a/MongoDBDriver/MapReduceBuilder.cs +++ /dev/null @@ -1,137 +0,0 @@ -using System; - -namespace MongoDB.Driver -{ - /// - /// Provides a Fluent interface to build and execute Map/Reduce calls. - /// - public class MapReduceBuilder : IDisposable - { - MapReduce mr; - public MapReduce MapReduce { - get { - return mr; - } - } - - public MapReduceBuilder(MapReduce mr){ - this.mr = mr; - } - - /// The map function references the variable this to inspect the current object under consideration. - /// A map function must call emit(key,value) at least once, but may be invoked any number of times, - /// as may be appropriate. - /// - public MapReduceBuilder Map(string function){ - return this.Map(new Code(function)); - } - - /// The map function references the variable this to inspect the current object under consideration. - /// A map function must call emit(key,value) at least once, but may be invoked any number of times, - /// as may be appropriate. - /// - public MapReduceBuilder Map(Code function){ - mr.Map = function; - return this; - } - - /// - /// The reduce function receives a key and an array of values. To use, reduce the received values, - /// and return a result. - /// - /// The MapReduce engine may invoke reduce functions iteratively; thus, these functions - /// must be idempotent. If you need to perform an operation only once, use a finalize function. - public MapReduceBuilder Reduce(string function){ - return this.Reduce(new Code(function)); - } - - /// - /// The reduce function receives a key and an array of values. To use, reduce the received values, - /// and return a result. - /// - /// The MapReduce engine may invoke reduce functions iteratively; thus, these functions - /// must be idempotent. If you need to perform an operation only once, use a finalize function. - public MapReduceBuilder Reduce(Code function){ - mr.Reduce = function; - return this; - } - - /// - /// Query filter object - /// - public MapReduceBuilder Query(Document query){ - mr.Query = query; - return this; - } - - /// - /// Sort the query. Useful for optimization - /// - public MapReduceBuilder Sort(Document sort){ - mr.Sort = sort; - return this; - } - - /// - /// Number of objects to return from collection - /// - public MapReduceBuilder Limit(long limit){ - mr.Limit = limit; - return this; - } - - /// - /// Name of the final collection the results should be stored in. - /// - /// A temporary collection is still used and then renamed to the target name atomically. - /// - public MapReduceBuilder Out(String name){ - mr.Out = name; - return this; - } - - /// - /// When true the generated collection is not treated as temporary. Specifying out automatically makes - /// the collection permanent - /// - public MapReduceBuilder KeepTemp(bool keep){ - mr.KeepTemp = keep; - return this; - } - - /// - /// Provides statistics on job execution time. - /// - public MapReduceBuilder Verbose(bool val){ - mr.Verbose = val; - return this; - } - - /// - /// Function to apply to all the results when finished. 
- /// - public MapReduceBuilder Finalize(Code function){ - mr.Finalize = function; - return this; - } - - /// - /// Document where fields go into javascript global scope - /// - public MapReduceBuilder Scope(Document scope){ - mr.Scope = scope; - return this; - } - - public MapReduce Execute(){ - mr.Execute(); - return mr; - } - - #region IDisposable implementation - public void Dispose (){ - mr.Dispose(); - } - #endregion - } -} diff --git a/MongoDBDriver/Mongo.cs b/MongoDBDriver/Mongo.cs deleted file mode 100644 index 365c10ee..00000000 --- a/MongoDBDriver/Mongo.cs +++ /dev/null @@ -1,80 +0,0 @@ -using System; -using MongoDB.Driver.Connections; - -namespace MongoDB.Driver -{ - /// - /// Description of Mongo. - /// - public class Mongo : IDisposable - { - private Connection connection; - - /// - /// Initializes a new instance of the class. - /// - public Mongo () : this(string.Empty){ - } - - /// - /// Initializes a new instance of the class. - /// - /// The connection string. - public Mongo (string connectionString){ - if (connectionString == null) - throw new ArgumentNullException ("connectionString"); - - connection = ConnectionFactory.GetConnection (connectionString); - } - - /// - /// Gets the connection string. - /// - /// The connection string. - public string ConnectionString { - get { return connection.ConnectionString; } - } - - /// - /// Gets the named database. - /// - /// The name. - /// - public Database GetDatabase (String name){ - return new Database (connection, name); - } - - /// - /// Gets the with the specified name. - /// - /// - public Database this[String name] { - get { return this.GetDatabase (name); } - } - - /// - /// Connects this instance. - /// - /// - public Boolean Connect (){ - connection.Open (); - return connection.State == ConnectionState.Opened; - } - - /// - /// Disconnects this instance. - /// - /// - public Boolean Disconnect (){ - connection.Close (); - return connection.State == ConnectionState.Closed; - } - - /// - /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - /// - public void Dispose (){ - connection.Dispose (); - } - } -} diff --git a/MongoDBDriver/MongoConnectionStringBuilder.cs b/MongoDBDriver/MongoConnectionStringBuilder.cs deleted file mode 100644 index 41a98403..00000000 --- a/MongoDBDriver/MongoConnectionStringBuilder.cs +++ /dev/null @@ -1,317 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.Text.RegularExpressions; - -namespace MongoDB.Driver -{ - [Serializable] - public class MongoConnectionStringBuilder - { - public const int DefaultMaximumPoolSize = 100; - public const int DefaultMinimumPoolSize = 0; - public const bool DefaultPooled = true; - public static readonly TimeSpan DefaultConnectionTimeout = TimeSpan.FromSeconds(15); - public static readonly TimeSpan DefaultConnectionLifeTime = TimeSpan.Zero; - - private static readonly Regex PairRegex = new Regex ("^\\s*(.*)\\s*=\\s*(.*)\\s*$"); - private static readonly Regex ServerRegex = new Regex ("\\s*([^:]+)(?::(\\d+))?\\s*$"); - - private readonly List _servers = new List (); - - /// - /// Initializes a new instance of the - /// - /// class. Uses the default server connection when - /// no server is added. 
- /// - public MongoConnectionStringBuilder (){ - ConnectionLifetime = DefaultConnectionLifeTime; - ConnectionTimeout = DefaultConnectionTimeout; - MaximumPoolSize = DefaultMaximumPoolSize; - MinimumPoolSize = DefaultMinimumPoolSize; - Pooled = DefaultPooled; - } - - /// - /// Initializes a new instance of the - /// - /// class. Uses the default server connection when - /// no server is added. - /// - /// The connection string. - public MongoConnectionStringBuilder (string connectionString) : this(){ - - if (!string.IsNullOrEmpty (connectionString)) - Parse (connectionString); - } - - /// - /// Gets the servers. - /// - /// The servers. - public MongoServerEndPoint[] Servers { - get { return _servers.Count == 0 ? new[] { MongoServerEndPoint.Default } : _servers.ToArray (); } - } - - /// - /// Gets or sets the password. - /// - /// The password. - public string Password { get; set; } - - /// - /// Gets or sets the username. - /// - /// The username. - public string Username { get; set; } - - - /// - /// Gets or sets the maximum size of the connection pool. - /// - /// The maximum size of the pool. - public int MaximumPoolSize { get; set; } - - /// - /// Gets or sets the size of the minimum connection pool. - /// - /// The size of the minimal pool. - public int MinimumPoolSize { get; set; } - - /// - /// Gets or sets the connection lifetime in connection pool. - /// - /// The connection lifetime. - public TimeSpan ConnectionLifetime { get; set; } - - /// - /// Gets or sets the connection timeout. - /// - /// The connection timeout. - public TimeSpan ConnectionTimeout { get; set; } - - /// - /// Gets or sets a value indicating whether connection is pooled. - /// - /// true if pooled; otherwise, false. - public bool Pooled { get; set; } - - /// - /// Parses the specified connection string. - /// - /// The connection string. - private void Parse (string connectionString){ - if (connectionString == null) - throw new ArgumentNullException ("connectionString"); - - var segments = connectionString.Split (';'); - - foreach (var segment in segments) { - var pairMatch = PairRegex.Match (segment); - if (!pairMatch.Success) - throw new FormatException (string.Format ("Invalid connection string on: {0}", pairMatch.Value)); - - var key = pairMatch.Groups[1].Value; - var value = pairMatch.Groups[2].Value; - - switch (key) { - case "Username": - case "User Id": - case "User": - { - Username = value; - break; - } - case "Password": - { - Password = value; - break; - } - case "Pooled": - { - try { - Pooled = bool.Parse(value); - } catch(FormatException exception) { - throw new FormatException("Invalid string for Pooled in connection string", exception); - } - break; - } - case "MaximumPoolSize": - case "Max Pool Size": - { - try { - MaximumPoolSize = int.Parse (value); - } catch (FormatException exception) { - throw new FormatException ("Invalid number for MaximumPoolSize in connection string", exception); - } - break; - } - case "MinimumPoolSize": - case "Min Pool Size": - { - try { - MinimumPoolSize = int.Parse (value); - } catch (FormatException exception) { - throw new FormatException ("Invalid number for MinimumPoolSize in connection string", exception); - } - break; - } - case "ConnectionLifetime": - case "Connection Lifetime": - { - try { - var seconds = double.Parse (value); - - ConnectionLifetime = seconds > 0 ? 
TimeSpan.FromSeconds (seconds) : DefaultConnectionLifeTime; - } catch (FormatException exception) { - throw new FormatException ("Invalid number for ConnectionLifetime in connection string", exception); - } - break; - } - case "ConnectionTimeout": - case "ConnectTimeout": - { - try { - var seconds = double.Parse(value); - - ConnectionTimeout = seconds > 0 ? TimeSpan.FromSeconds(seconds) : DefaultConnectionTimeout; - } catch(FormatException exception) { - throw new FormatException("Invalid number for ConnectionTimeout in connection string", exception); - } - break; - } - case "Server": - case "Servers": - { - var servers = value.Split (','); - - foreach (var server in servers) { - var serverMatch = ServerRegex.Match (server); - if (!serverMatch.Success) - throw new FormatException (string.Format ("Invalid server in connection string: {0}", serverMatch.Value)); - - var serverHost = serverMatch.Groups[1].Value; - - int port; - if (int.TryParse (serverMatch.Groups[2].Value, out port)) - AddServer (serverHost, port); - else - AddServer (serverHost); - } - - break; - } - default: - throw new FormatException (string.Format ("Unknown connection string option: {0}", key)); - } - } - } - - /// - /// Adds the server. - /// - /// The end point. - public void AddServer (MongoServerEndPoint endPoint){ - if (endPoint == null) - throw new ArgumentNullException ("endPoint"); - - _servers.Add (endPoint); - } - - /// - /// Clears the servers. - /// - public void ClearServers (){ - _servers.Clear (); - } - - /// - /// Adds the server with the given host and default port. - /// - /// The host. - public void AddServer (string host){ - AddServer (new MongoServerEndPoint (host)); - } - - /// - /// Adds the server with the given host and port. - /// - /// The host. - /// The port. - public void AddServer (string host, int port){ - AddServer (new MongoServerEndPoint (host, port)); - } - - /// - /// Returns a - /// - /// that represents this instance. - /// - /// A - /// - /// that represents this instance. 
- public override string ToString (){ - var builder = new StringBuilder (); - - if (!string.IsNullOrEmpty (Username)) { - builder.AppendFormat ("Username={0}", Username); - builder.Append (';'); - } - - if (!string.IsNullOrEmpty (Password)) { - builder.AppendFormat ("Password={0}", Password); - builder.Append (';'); - } - - if (_servers.Count > 0) { - builder.Append ("Server="); - - foreach (var server in _servers) { - builder.Append (server.Host); - - if (server.Port != MongoServerEndPoint.DefaultPort) - builder.AppendFormat (":{0}", server.Port); - - builder.Append (','); - } - - // remove last , - builder.Remove (builder.Length - 1, 1); - - builder.Append (';'); - } - - if(Pooled!=true){ - builder.AppendFormat("Pooled={0}", Pooled); - builder.Append(';'); - } - - if (MaximumPoolSize != DefaultMaximumPoolSize) { - builder.AppendFormat ("MaximumPoolSize={0}", MaximumPoolSize); - builder.Append (';'); - } - - if (MinimumPoolSize != DefaultMinimumPoolSize) { - builder.AppendFormat ("MinimumPoolSize={0}", MinimumPoolSize); - builder.Append (';'); - } - - if (ConnectionTimeout != DefaultConnectionTimeout) { - builder.AppendFormat("ConnectionTimeout={0}", ConnectionTimeout.TotalSeconds); - builder.Append(';'); - } - - if (ConnectionLifetime != DefaultConnectionLifeTime) { - builder.AppendFormat ("ConnectionLifetime={0}", ConnectionLifetime.TotalSeconds); - builder.Append (';'); - } - - // remove last ; - if (builder.Length > 0) - builder.Remove (builder.Length - 1, 1); - - return builder.ToString (); - } - } -} diff --git a/MongoDBDriver/MongoDB.Driver.csproj b/MongoDBDriver/MongoDB.Driver.csproj deleted file mode 100644 index ad831764..00000000 --- a/MongoDBDriver/MongoDB.Driver.csproj +++ /dev/null @@ -1,161 +0,0 @@ - - - - Debug - AnyCPU - 9.0.30729 - 2.0 - {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} - Library - MongoDB.Driver - C:\Documents and Settings\scorder\Application Data\ICSharpCode/SharpDevelop3.0\Settings.SourceAnalysis - True - False - False - false - - - 2.0 - - - publish\ - true - Disk - false - Foreground - 7 - Days - false - false - true - 0 - 1.0.0.%2a - false - false - true - v2.0 - MongoDB.Driver - - - true - full - false - bin\Debug - DEBUG TRACE - prompt - 4 - false - - - none - false - bin\Release - prompt - 4 - false - - - False - - - False - Auto - 4194304 - AnyCPU - 4096 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - False - - - False - - - False - - - False - - - False - - - - - - - - \ No newline at end of file diff --git a/MongoDBDriver/MongoRegex.cs b/MongoDBDriver/MongoRegex.cs deleted file mode 100644 index 1f0af882..00000000 --- a/MongoDBDriver/MongoRegex.cs +++ /dev/null @@ -1,48 +0,0 @@ -namespace MongoDB.Driver -{ - public class MongoRegex - { - /// - /// A valid regex string including the enclosing / characters. - /// - public string Expression {get; set;} - - /// - /// A string that may contain only the characters 'g', 'i', and 'm'. - /// Because the JS and TenGen representations support a limited range of options, - /// any nonconforming options will be dropped when converting to this representation - /// - public string Options {get;set;} - - /// - /// Initializes a new instance of the class. - /// - public MongoRegex(){} - - /// - /// Initializes a new instance of the class. - /// - /// The expression. - public MongoRegex(string expression):this(expression,string.Empty){} - - /// - /// Initializes a new instance of the class. - /// - /// The expression. 
- /// The options. - public MongoRegex(string expression, string options){ - this.Expression = expression; - this.Options = options; - } - - /// - /// Returns a that represents this instance. - /// - /// - /// A that represents this instance. - /// - public override string ToString (){ - return string.Format("{0}{1}", Expression, Options); - } - } -} diff --git a/MongoDBDriver/MongoServerEndPoint.cs b/MongoDBDriver/MongoServerEndPoint.cs deleted file mode 100644 index b8332d36..00000000 --- a/MongoDBDriver/MongoServerEndPoint.cs +++ /dev/null @@ -1,72 +0,0 @@ -using System; -using System.Net; - -namespace MongoDB.Driver -{ - /// - /// Represents a mongodb server with host and port. - /// - [Serializable] - public class MongoServerEndPoint : EndPoint - { - public const string DefaultHost = "localhost"; - public const int DefaultPort = 27017; - - /// - /// The default MongoServerEndPoint. - /// - public static readonly MongoServerEndPoint Default = new MongoServerEndPoint(); - - /// - /// Initializes a new instance of the class. - /// - public MongoServerEndPoint() - : this(DefaultHost, DefaultPort) - { - } - - /// - /// Initializes a new instance of the class. - /// - /// The host. - public MongoServerEndPoint(string host) - : this(host, DefaultPort) - { - } - - /// - /// Initializes a new instance of the class. - /// - /// The port. - public MongoServerEndPoint(int port) - : this(DefaultHost, port) - { - } - - /// - /// Initializes a new instance of the class. - /// - /// The host. - /// The port. - public MongoServerEndPoint(string host, int port) - { - if(host == null) - throw new ArgumentNullException("host"); - - Host = host; - Port = port; - } - - /// - /// Gets or sets the host. - /// - /// The host. - public string Host { get; private set; } - - /// - /// Gets or sets the port. - /// - /// The port. - public int Port { get; private set; } - } -} \ No newline at end of file diff --git a/MongoDBDriver/Oid.cs b/MongoDBDriver/Oid.cs deleted file mode 100644 index 3c966497..00000000 --- a/MongoDBDriver/Oid.cs +++ /dev/null @@ -1,173 +0,0 @@ -using System; -using System.Text.RegularExpressions; -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver{ - - /// - /// Oid is an immutable object that represents a Mongo ObjectId. - /// - public class Oid: IEquatable, IComparable - { - private static OidGenerator oidGenerator = new OidGenerator(); - - private byte[] bytes; - - /// - /// Gets the created. - /// - /// The created. - public DateTime Created{ - get{ - byte[] time = new byte[4]; - Array.Copy(bytes,time,4); - Array.Reverse(time); - int seconds = BitConverter.ToInt32(time,0); - return BsonInfo.Epoch.AddSeconds(seconds); - } - } - - /// - /// Initializes a new instance of the class. - /// - /// The value. - public Oid(string value){ - value = value.Replace("\"", ""); - ValidateHex(value); - bytes = DecodeHex(value); - } - - /// - /// Initializes a new instance of the class. - /// - /// The value. - public Oid(byte[] value){ - bytes = new byte[12]; - Array.Copy(value,bytes,12); - } - - - /// - /// Determines whether the specified is equal to this instance. - /// - /// The to compare with this instance. - /// - /// true if the specified is equal to this instance; otherwise, false. 
- /// - public override bool Equals(object obj){ - if(obj is Oid){ - return this.CompareTo((Oid)obj) == 0; - } - return false; - } - - public bool Equals (Oid other){ - return this.CompareTo(other) == 0; - } - - public int CompareTo (Oid other){ - if (System.Object.ReferenceEquals(other, null)){ - return 1; - } - byte[] otherBytes = other.ToByteArray(); - for(int x = 0; x < bytes.Length; x++){ - if(bytes[x] < otherBytes[x]){ - return -1; - }else if(bytes[x] > otherBytes[x]){ - return 1; - } - } - return 0; - } - - /// - /// Returns a hash code for this instance. - /// - /// - /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. - /// - public override int GetHashCode(){ - return ToString().GetHashCode(); - } - - /// - /// Returns a that represents this instance. - /// - /// - /// A that represents this instance. - /// - public override string ToString() { - return String.Format("\"{0}\"",BitConverter.ToString(bytes).Replace("-","").ToLower()); - } - - /// - /// Converts the Oid to a byte array. - /// - public byte[] ToByteArray(){ - byte[] ret = new byte[12]; - Array.Copy(bytes, ret,12); - return ret; - } - - /// - /// Generates an Oid using OidGenerator. - /// - /// - /// A - /// - public static Oid NewOid(){ - return oidGenerator.Generate(); - } - - public static bool operator ==(Oid a, Oid b){ - return a.Equals(b); - } - - public static bool operator !=(Oid a, Oid b){ - return !(a == b); - } - - public static bool operator >(Oid a, Oid b){ - return a.CompareTo(b) > 0; - } - - public static bool operator <(Oid a, Oid b){ - return a.CompareTo(b) < 0; - } - - - /// - /// Validates the hex. - /// - /// The value. - protected void ValidateHex(string value){ - if(value == null || value.Length != 24) throw new ArgumentException("Oid strings should be 24 characters"); - - Regex notHexChars = new Regex(@"[^A-Fa-f0-9]", RegexOptions.None); - if(notHexChars.IsMatch(value)){ - throw new ArgumentOutOfRangeException("value","Value contains invalid characters"); - } - } - - /// - /// Decodes the hex. - /// - /// The value. - /// - protected static byte[] DecodeHex(string value){ - int numberChars = value.Length; - - byte[] bytes = new byte[numberChars / 2]; - for (int i = 0; i < numberChars; i += 2){ - try{ - bytes[i / 2] = Convert.ToByte(value.Substring(i, 2), 16); - } - catch{ - //failed to convert these 2 chars, they may contain illegal charracters - bytes[i / 2] = 0; - } - } - return bytes; - } - } -} diff --git a/MongoDBDriver/OidGenerator.cs b/MongoDBDriver/OidGenerator.cs deleted file mode 100644 index eb4955fd..00000000 --- a/MongoDBDriver/OidGenerator.cs +++ /dev/null @@ -1,102 +0,0 @@ - -using System; -using System.Diagnostics; -using System.Security.Cryptography; -using System.Text; -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver -{ - public class OidGenerator - { - private int inc; - private object inclock = new object(); - private byte[] machineHash; - private byte[] procID; - - /// - /// Initializes a new instance of the class. - /// - public OidGenerator(){ - GenerateConstants(); - } - - /// - /// Generates this instance. - /// - /// - public Oid Generate(){ - //FIXME Endian issues with this code. - //.Net runs in native endian mode which is usually little endian. 
- //Big endian machines don't need the reversing (Linux+PPC, XNA on XBox) - byte[] oid = new byte[12]; - int copyidx = 0; - - byte[] time = BitConverter.GetBytes(GenerateTime()); - Array.Reverse(time); - Array.Copy(time,0,oid,copyidx,4); - copyidx += 4; - - Array.Copy(machineHash,0,oid,copyidx,3); - copyidx += 3; - - Array.Copy(this.procID,2,oid,copyidx,2); - copyidx += 2; - - byte[] inc = BitConverter.GetBytes(GenerateInc()); - Array.Reverse(inc); - Array.Copy(inc,1,oid,copyidx,3); - - return new Oid(oid); - } - - /// - /// Generates the time. - /// - /// - private int GenerateTime(){ - DateTime now = DateTime.UtcNow; - //DateTime nowtime = new DateTime(epoch.Year, epoch.Month, epoch.Day, now.Hour, now.Minute, now.Second, now.Millisecond); - TimeSpan diff = now - BsonInfo.Epoch; - return Convert.ToInt32(Math.Floor(diff.TotalSeconds)); - } - - /// - /// Generates the inc. - /// - /// - private int GenerateInc(){ - lock(this.inclock){ - return ++inc; - } - } - - /// - /// Generates the constants. - /// - private void GenerateConstants(){ - this.machineHash = GenerateHostHash(); - this.procID = BitConverter.GetBytes(GenerateProcId()); - Array.Reverse(this.procID); - } - - /// - /// Generates the host hash. - /// - /// - private byte[] GenerateHostHash(){ - MD5 md5 = MD5.Create(); - string host = System.Net.Dns.GetHostName(); - return md5.ComputeHash(Encoding.Default.GetBytes(host)); - } - - /// - /// Generates the proc id. - /// - /// - private int GenerateProcId(){ - Process proc = Process.GetCurrentProcess(); - return proc.Id; - } - } -} diff --git a/MongoDBDriver/Protocol/DeleteMessage.cs b/MongoDBDriver/Protocol/DeleteMessage.cs deleted file mode 100644 index abd75aaa..00000000 --- a/MongoDBDriver/Protocol/DeleteMessage.cs +++ /dev/null @@ -1,44 +0,0 @@ -/* - * User: scorder - */ -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver.Protocol -{ - /// - /// - /// - /// - /// struct { - /// MsgHeader header; // standard message header - /// int32 ZERO; // 0 - reserved for future use - /// cstring fullCollectionName; // "dbname.collectionname" - /// int32 ZERO; // 0 - reserved for future use - /// BSON selector; // query object. See below for details. - /// } - /// - public class DeleteMessage : RequestMessageBase - { - public string FullCollectionName { get; set; } - - public Document Selector { get; set; } - - public DeleteMessage(){ - this.Header = new MessageHeader(OpCode.Delete); - } - - protected override void WriteBody (BsonWriter writer){ - writer.WriteValue(BsonDataType.Integer,0); - writer.WriteString(this.FullCollectionName); - writer.WriteValue(BsonDataType.Integer,0); - writer.Write(this.Selector); - } - - protected override int CalculateBodySize(BsonWriter writer){ - int size = 8; //first int32, second int32 - size += writer.CalculateSize(this.FullCollectionName,false); - size += writer.CalculateSize(Selector); - return size; - } - } -} \ No newline at end of file diff --git a/MongoDBDriver/Protocol/GetMoreMessage.cs b/MongoDBDriver/Protocol/GetMoreMessage.cs deleted file mode 100644 index 5582d8df..00000000 --- a/MongoDBDriver/Protocol/GetMoreMessage.cs +++ /dev/null @@ -1,51 +0,0 @@ -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver.Protocol -{ - /// - /// Description of GetMoreMessage. 
- /// - /// - /// struct { - /// MsgHeader header; // standard message header - /// int32 ZERO; // 0 - reserved for future use - /// cstring fullCollectionName; // "dbname.collectionname" - /// int32 numberToReturn; // number of documents to return - /// int64 cursorID; // cursorID from the OP_REPLY - /// } - /// - public class GetMoreMessage : RequestMessageBase - { - public long CursorID { get; set; } - - public string FullCollectionName { get; set; } - - public int NumberToReturn { get; set; } - - public GetMoreMessage(string fullCollectionName, long cursorID) - :this(fullCollectionName, cursorID, 0){ - } - - public GetMoreMessage(string fullCollectionName, long cursorID, int numberToReturn){ - this.Header = new MessageHeader(OpCode.GetMore); - this.FullCollectionName = fullCollectionName; - this.CursorID = cursorID; - this.NumberToReturn = numberToReturn; - } - - protected override void WriteBody (BsonWriter writer){ - writer.WriteValue(BsonDataType.Integer,0); - writer.WriteString(this.FullCollectionName); - writer.WriteValue(BsonDataType.Integer,this.NumberToReturn); - writer.WriteValue(BsonDataType.Long,this.CursorID); - } - - protected override int CalculateBodySize(BsonWriter writer){ - int size = 4; //first int32 - size += writer.CalculateSize(this.FullCollectionName,false); - size += 12; //number to return + cursorid - return size; - } - - } -} diff --git a/MongoDBDriver/Protocol/IRequestMessage.cs b/MongoDBDriver/Protocol/IRequestMessage.cs deleted file mode 100644 index 93605fca..00000000 --- a/MongoDBDriver/Protocol/IRequestMessage.cs +++ /dev/null @@ -1,12 +0,0 @@ -using System.IO; - -namespace MongoDB.Driver.Protocol -{ - /// - /// A Message that is to be written to the database. - /// - public interface IRequestMessage - { - void Write (Stream stream); - } -} diff --git a/MongoDBDriver/Protocol/InsertMessage.cs b/MongoDBDriver/Protocol/InsertMessage.cs deleted file mode 100644 index a6943836..00000000 --- a/MongoDBDriver/Protocol/InsertMessage.cs +++ /dev/null @@ -1,107 +0,0 @@ -using System; -using System.Collections.Generic; -using System.IO; - -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver.Protocol -{ - /// - /// Description of InsertMessage. - /// - /// - /// MsgHeader header; // standard message header - /// int32 ZERO; // 0 - reserved for future use - /// cstring fullCollectionName; // "dbname.collectionname" - /// BSON[] documents; // one or more documents to insert into the collection - /// - public class InsertMessage : MessageBase, IRequestMessage - { - protected struct MessageChunk{ - public int Size; - public List Documents; - } - - public string FullCollectionName { get; set; } - - public Document[] Documents { get; set; } - - private List chunks = new List(); - - public InsertMessage(){ - this.Header = new MessageHeader(OpCode.Insert); - } - - public void Write (Stream stream){ - MessageHeader header = this.Header; - BufferedStream bstream = new BufferedStream(stream); - - BsonWriter bwriter = new BsonWriter(bstream); - ChunkMessage(bwriter); - - foreach(MessageChunk chunk in chunks){ - WriteChunk(bstream, chunk); - } - } - - /// - /// Breaks down an insert message that may be too large into managable sizes. - /// When inserting only one document there will be only one chunk. However chances - /// are that when inserting thousands of documents at once there will be many. 
- /// - protected void ChunkMessage(BsonWriter writer){ - int baseSize = CalculateBaseSize(writer); - - MessageChunk chunk = new MessageChunk(){Size = baseSize, Documents = new List()}; - foreach(Document doc in this.Documents){ - int docSize = writer.CalculateSize(doc); - if(docSize + baseSize >= MessageBase.MaximumMessageSize) throw new MongoException("Document is too big to fit in a message."); - - if(docSize + chunk.Size > MessageBase.MaximumMessageSize){ - chunks.Add(chunk); - chunk = new MessageChunk(){Size = baseSize, Documents = new List()}; - } - chunk.Documents.Add(doc); - chunk.Size += docSize; - } - chunks.Add(chunk); - } - - /// - /// The base size that all chunks will have. - /// - protected int CalculateBaseSize(BsonWriter writer){ - int size = 4; //first int32 - size += writer.CalculateSize(this.FullCollectionName,false); - size += Header.MessageLength; - return size; - } - - /// - /// Writes out a header and the chunk of documents. - /// - /// - /// - protected void WriteChunk (Stream stream, MessageChunk chunk){ - WriteHeader(new BinaryWriter(stream), chunk.Size); - - BsonWriter writer = new BsonWriter(stream); - writer.WriteValue(BsonDataType.Integer,0); - writer.WriteString(this.FullCollectionName); - - foreach(Document doc in chunk.Documents){ - writer.Write(doc); - } - writer.Flush(); - } - - protected void WriteHeader(BinaryWriter writer, int msgSize){ - MessageHeader header = this.Header; - writer.Write(msgSize); - writer.Write(header.RequestId); - writer.Write(header.ResponseTo); - writer.Write((int)header.OpCode); - writer.Flush(); - } - } -} diff --git a/MongoDBDriver/Protocol/KillCursorsMessage.cs b/MongoDBDriver/Protocol/KillCursorsMessage.cs deleted file mode 100644 index 29eeb3f2..00000000 --- a/MongoDBDriver/Protocol/KillCursorsMessage.cs +++ /dev/null @@ -1,49 +0,0 @@ -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver.Protocol -{ - /// - /// Description of KillCursorsMessage. 
- /// - /// - /// struct { - /// MsgHeader header; // standard message header - /// int32 ZERO; // 0 - reserved for future use - /// int32 numberOfCursorIDs; // number of cursorIDs in message - /// int64[] cursorIDs; // array of cursorIDs to close - /// } - /// - public class KillCursorsMessage:RequestMessageBase - { - public long[] CursorIDs { get; set; } - - public KillCursorsMessage(){ - this.Header = new MessageHeader(OpCode.KillCursors); - } - - public KillCursorsMessage(long cursorID):this(){ - this.CursorIDs = new long[]{cursorID}; - } - - public KillCursorsMessage(long[] cursorIDs):this(){ - this.CursorIDs = cursorIDs; - } - - protected override void WriteBody (BsonWriter writer){ - writer.WriteValue(BsonDataType.Integer,0); - writer.WriteValue(BsonDataType.Integer, this.CursorIDs.Length); - - foreach(long id in this.CursorIDs){ - writer.WriteValue(BsonDataType.Long, id); - } - } - - protected override int CalculateBodySize(BsonWriter writer){ - int size = 8; //first int32, number of cursors - foreach(long id in this.CursorIDs){ - size += 8; - } - return size; - } - } -} diff --git a/MongoDBDriver/Protocol/MessageBase.cs b/MongoDBDriver/Protocol/MessageBase.cs deleted file mode 100644 index 94775b4c..00000000 --- a/MongoDBDriver/Protocol/MessageBase.cs +++ /dev/null @@ -1,12 +0,0 @@ -namespace MongoDB.Driver.Protocol -{ - /// - /// Base class for all raw messages - /// - public abstract class MessageBase - { - public static int MaximumMessageSize = 1024 * 1024 * 4; - - public MessageHeader Header { get; set; } - } -} diff --git a/MongoDBDriver/Protocol/MessageHeader.cs b/MongoDBDriver/Protocol/MessageHeader.cs deleted file mode 100644 index 50f9ae2f..00000000 --- a/MongoDBDriver/Protocol/MessageHeader.cs +++ /dev/null @@ -1,37 +0,0 @@ -/* - * User: scorder - * Date: 7/7/2009 - */ -using System; - -namespace MongoDB.Driver.Protocol -{ /// - /// In general, each Message consists of a standard message header followed by request-specific data. - /// - public class MessageHeader - { - // total size of the message, including the 4 bytes of length - public int MessageLength { get; set; } - - // client or database-generated identifier for this message - public int RequestId { get; set; } - - // requestID from the original request (used in reponses from db) - public int ResponseTo { get; set; } - - // request type - see table below - [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1706:ShortAcronymsShouldBeUppercase", MessageId = "Member")] - public OpCode OpCode { get; set; } - - public MessageHeader(OpCode opCode) - { - this.OpCode = opCode; - this.MessageLength = 16; //The starting size of any message. 
- } - - public override String ToString(){ - return "length:" + this.MessageLength + " requestId:" + this.RequestId + " responseTo:" + this.ResponseTo + " opCode:" + this.OpCode; - } - - } -} diff --git a/MongoDBDriver/Protocol/MsgMessage.cs b/MongoDBDriver/Protocol/MsgMessage.cs deleted file mode 100644 index 5e602a34..00000000 --- a/MongoDBDriver/Protocol/MsgMessage.cs +++ /dev/null @@ -1,30 +0,0 @@ -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver.Protocol -{ - /// - /// - /// - /// - /// struct { - /// MsgHeader header; // standard message header - /// cstring message; // message for the database - /// } - /// - public class MsgMessage : RequestMessageBase - { - public string Message { get; set; } - - public MsgMessage(){ - this.Header = new MessageHeader(OpCode.Msg); - } - - protected override void WriteBody (BsonWriter writer){ - writer.WriteString(this.Message); - } - - protected override int CalculateBodySize(BsonWriter writer){ - return writer.CalculateSize(this.Message,false); - } - } -} \ No newline at end of file diff --git a/MongoDBDriver/Protocol/OpCode.cs b/MongoDBDriver/Protocol/OpCode.cs deleted file mode 100644 index b72a475e..00000000 --- a/MongoDBDriver/Protocol/OpCode.cs +++ /dev/null @@ -1,14 +0,0 @@ -namespace MongoDB.Driver.Protocol -{ - public enum OpCode{ - Reply = 1, //Reply to a client request. responseTo is set - Msg = 1000, //generic msg command followed by a string - Update = 2001, //update document - Insert = 2002, //insert new document - GetByOID = 2003, //is this used? - Query = 2004, //query a collection - GetMore = 2005, //Get more data from a query. See Cursors - Delete = 2006, //Delete documents - KillCursors = 2007 //Tell database client is done with a cursor - } -} \ No newline at end of file diff --git a/MongoDBDriver/Protocol/QueryMessage.cs b/MongoDBDriver/Protocol/QueryMessage.cs deleted file mode 100644 index 8da95823..00000000 --- a/MongoDBDriver/Protocol/QueryMessage.cs +++ /dev/null @@ -1,75 +0,0 @@ -using System; -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver.Protocol -{ - /// - /// Description of QueryMessage. - /// - /// - /// MsgHeader header; // standard message header - /// int32 opts; // query options. See QueryOptions for values - /// cstring fullCollectionName; // "dbname.collectionname" - /// int32 numberToSkip; // number of documents to skip when returning results - /// int32 numberToReturn; // number of documents to return in the first OP_REPLY - /// BSON query ; // query object. See below for details. - /// [ BSON returnFieldSelector; ] // OPTIONAL : selector indicating the fields to return. See below for details. 
- /// - public class QueryMessage : RequestMessageBase - { - public QueryOptions Options { get; set; } - - public string FullCollectionName { get; set; } - - public int NumberToSkip { get; set; } - - public int NumberToReturn { get; set; } - - public Document Query { get; set; } - - public Document ReturnFieldSelector { get; set; } - - public QueryMessage(){ - this.Header = new MessageHeader(OpCode.Query); - } - - public QueryMessage(Document query, String fullCollectionName) - :this(query,fullCollectionName,0,0){ - } - - public QueryMessage(Document query, String fullCollectionName, Int32 numberToReturn, Int32 numberToSkip) - :this(query,fullCollectionName,numberToReturn, numberToSkip, null){ - } - - public QueryMessage(Document query, String fullCollectionName, Int32 numberToReturn, - Int32 numberToSkip, Document returnFieldSelector){ - this.Header = new MessageHeader(OpCode.Query); - this.Query = query; - this.FullCollectionName = fullCollectionName; - this.NumberToReturn = numberToReturn; - this.NumberToSkip = numberToSkip; - this.ReturnFieldSelector = returnFieldSelector; - } - - protected override void WriteBody (BsonWriter writer){ - writer.WriteValue(BsonDataType.Integer,(int)this.Options); - writer.WriteString(this.FullCollectionName); - writer.WriteValue(BsonDataType.Integer,(int)this.NumberToSkip); - writer.WriteValue(BsonDataType.Integer,(int)this.NumberToReturn); - writer.Write(this.Query); - if(this.ReturnFieldSelector != null){ - writer.Write(this.ReturnFieldSelector); - } - } - - protected override int CalculateBodySize(BsonWriter writer){ - int size = 12; //options, numbertoskip, numbertoreturn - size += writer.CalculateSize(this.FullCollectionName,false); - size += writer.CalculateSize(this.Query); - if(this.ReturnFieldSelector != null){ - size += writer.CalculateSize(this.ReturnFieldSelector); - } - return size; - } - } -} diff --git a/MongoDBDriver/Protocol/ReplyMessage.cs b/MongoDBDriver/Protocol/ReplyMessage.cs deleted file mode 100644 index 1ee0159c..00000000 --- a/MongoDBDriver/Protocol/ReplyMessage.cs +++ /dev/null @@ -1,95 +0,0 @@ -using System.Collections.Generic; -using System.IO; -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver.Protocol -{ - public class ReplyMessage:MessageBase - { - // normally zero, non-zero on query failure - public int ResponseFlag { get; set; } - - // id of the cursor created for this query response - public long CursorID { get; set; } - - // indicates where in the cursor this reply is starting - public int StartingFrom { get; set; } - - // number of documents in the reply - public int NumberReturned { get; set; } - - public Document[] Documents { get; set; } - - public void Read(Stream stream){ - stream = new BufferedStream(stream, 256); - BinaryReader reader = new BinaryReader(stream); - this.Header = ReadHeader(reader); - this.ResponseFlag = reader.ReadInt32(); - this.CursorID = reader.ReadInt64(); - this.StartingFrom = reader.ReadInt32(); - this.NumberReturned = reader.ReadInt32(); - - BsonReader breader = new BsonReader(stream); - List docs = new List(); - for(int num = 0; num < this.NumberReturned; num++){ - docs.Add(breader.Read()); - } - this.Documents = docs.ToArray(); - } - - protected MessageHeader ReadHeader(BinaryReader reader){ - MessageHeader hdr = new MessageHeader(OpCode.Reply); - hdr.MessageLength = reader.ReadInt32(); - hdr.RequestId = reader.ReadInt32(); - hdr.ResponseTo = reader.ReadInt32(); - int op = reader.ReadInt32(); - if((OpCode)op != OpCode.Reply) throw new InvalidDataException("Should have been a reply 
but wasn't"); - return hdr; - } - -// public void Read(Stream stream){ -// /* Used during debugging of the stream. -// BsonReader headerreader = new BsonReader(stream); -// this.Header = ReadHeader(headerreader); -// -// //buffer the whole response into a memorystream for debugging. -// MemoryStream buffer = new MemoryStream(); -// BinaryReader buffReader = new BinaryReader(stream); -// BinaryWriter buffWriter = new BinaryWriter(buffer); -// byte[] body = buffReader.ReadBytes(this.Header.MessageLength - 16); -// System.Console.WriteLine(BitConverter.ToString(body)); -// buffWriter.Write(body); -// buffer.Seek(0, SeekOrigin.Begin); -// -// BsonReader reader = new BsonReader(buffer);*/ -// -// //BsonReader reader = new BsonReader(stream); -// //BsonReader reader = new BsonReader(new BufferedStream(stream)); -// BsonReader reader = new BsonReader(new BufferedStream(stream, 4 * 1024)); -// this.Header = ReadHeader(reader); -// -// this.ResponseFlag = reader.ReadInt32(); -// this.CursorID = reader.ReadInt64(); -// this.StartingFrom = reader.ReadInt32(); -// this.NumberReturned = reader.ReadInt32(); -// -// List docs = new List(); -// for(int num = 0; num < this.NumberReturned; num++){ -// BsonDocument doc = new BsonDocument(); -// doc.Read(reader); -// docs.Add(doc); -// } -// this.Documents = docs.ToArray(); -// } -// -// protected MessageHeader ReadHeader(BsonReader reader){ -// MessageHeader hdr = new MessageHeader(OpCode.Reply); -// hdr.MessageLength = reader.ReadInt32(); -// hdr.RequestId = reader.ReadInt32(); -// hdr.ResponseTo = reader.ReadInt32(); -// int op = reader.ReadInt32(); -// if((OpCode)op != OpCode.Reply) throw new InvalidDataException("Should have been a reply but wasn't"); -// return hdr; -// } - } -} diff --git a/MongoDBDriver/Protocol/RequestMessageBase.cs b/MongoDBDriver/Protocol/RequestMessageBase.cs deleted file mode 100644 index c29388ea..00000000 --- a/MongoDBDriver/Protocol/RequestMessageBase.cs +++ /dev/null @@ -1,35 +0,0 @@ -using System.IO; -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver.Protocol -{ - /// - /// Description of Message. 
- /// - public abstract class RequestMessageBase : MessageBase, IRequestMessage - { - public void Write (Stream stream){ - MessageHeader header = this.Header; - BufferedStream bstream = new BufferedStream(stream); - BinaryWriter writer = new BinaryWriter(bstream); - BsonWriter bwriter = new BsonWriter(bstream); - - Header.MessageLength += this.CalculateBodySize(bwriter); - if(Header.MessageLength > MessageBase.MaximumMessageSize){ - throw new MongoException("Maximum message length exceeded"); - } - - writer.Write(header.MessageLength); - writer.Write(header.RequestId); - writer.Write(header.ResponseTo); - writer.Write((int)header.OpCode); - writer.Flush(); - WriteBody(bwriter); - bwriter.Flush(); - } - - protected abstract void WriteBody(BsonWriter writer); - - protected abstract int CalculateBodySize(BsonWriter writer); - } -} diff --git a/MongoDBDriver/Protocol/UpdateMessage.cs b/MongoDBDriver/Protocol/UpdateMessage.cs deleted file mode 100644 index dfd4036b..00000000 --- a/MongoDBDriver/Protocol/UpdateMessage.cs +++ /dev/null @@ -1,49 +0,0 @@ -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver.Protocol -{ - /// - /// - /// - /// - /// struct { - /// MsgHeader header; // standard message header - /// int32 ZERO; // 0 - reserved for future use - /// cstring fullCollectionName; // "dbname.collectionname" - /// int32 flags; // value 0 for upsert 1 for multiupdate operation - /// BSON selector; // the query to select the document - /// BSON document; // the document data to update with or insert - /// } - /// - public class UpdateMessage : RequestMessageBase - { - public string FullCollectionName { get; set; } - - public Document Selector { get; set; } - - public Document Document { get; set; } - - public int Flags { get; set; } - - public UpdateMessage(){ - this.Header = new MessageHeader(OpCode.Update); - } - - protected override void WriteBody (BsonWriter writer){ - writer.WriteValue(BsonDataType.Integer,0); - writer.WriteString(this.FullCollectionName); - writer.WriteValue(BsonDataType.Integer,this.Flags); - writer.Write(Selector); - writer.Write(Document); - } - - protected override int CalculateBodySize(BsonWriter writer){ - int size = 4; //first int32 - size += writer.CalculateSize(this.FullCollectionName,false); - size += 4; //flags - size += writer.CalculateSize(this.Selector); - size += writer.CalculateSize(this.Document); - return size; - } - } -} \ No newline at end of file diff --git a/MongoDBDriver/QueryOptions.cs b/MongoDBDriver/QueryOptions.cs deleted file mode 100644 index 6b10bf33..00000000 --- a/MongoDBDriver/QueryOptions.cs +++ /dev/null @@ -1,9 +0,0 @@ -namespace MongoDB.Driver -{ - public enum QueryOptions { - None = 0, - TailableCursor = 2, - SlaveOK = 4, - NoCursorTimeout = 16 - } -} \ No newline at end of file diff --git a/MongoDBDriver/UpdateFlags.cs b/MongoDBDriver/UpdateFlags.cs deleted file mode 100644 index cb53e5e6..00000000 --- a/MongoDBDriver/UpdateFlags.cs +++ /dev/null @@ -1,7 +0,0 @@ -namespace MongoDB.Driver -{ - public enum UpdateFlags { - Upsert = 1, - MultiUpdate = 2 - } -} \ No newline at end of file diff --git a/MongoDBDriver/bin/Debug/.gitignore b/MongoDBDriver/bin/Debug/.gitignore deleted file mode 100644 index 5d657c34..00000000 --- a/MongoDBDriver/bin/Debug/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -*.mdb - diff --git a/MongoDBDriver/bin/Debug/en/LC_MESSAGES/.mo b/MongoDBDriver/bin/Debug/en/LC_MESSAGES/.mo deleted file mode 100644 index 1e036f81..00000000 Binary files a/MongoDBDriver/bin/Debug/en/LC_MESSAGES/.mo and /dev/null differ diff --git 
a/MongoDBDriver2010.sln b/MongoDBDriver2010.sln deleted file mode 100644 index 304f3550..00000000 --- a/MongoDBDriver2010.sln +++ /dev/null @@ -1,92 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 10.00 -# Visual Studio 2008 -# SharpDevelop 3.1.1.5327 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Driver", "MongoDBDriver\MongoDB.Driver.csproj", "{B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}" -EndProject -Project("{9344bdbb-3e7f-41fc-a0dd-8665d75ee146}") = "Packages", "Packages.mdproj", "{502F3381-58AA-461B-B9D8-12578A588C61}" -EndProject -Project("{9344bdbb-3e7f-41fc-a0dd-8665d75ee146}") = "MongoDBDriverTranslation", "MongoDBDriverTranslation\MongoDBDriverTranslation.mdproj", "{DCBE47DD-59A6-4212-AA4A-142838088B69}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Driver.Tests", "MongoDB.Net-Tests\MongoDB.Driver.Tests.csproj", "{C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Linq", "MongoDB.Linq\MongoDB.Linq.csproj", "{2E48891E-72F9-445D-9A5A-DBA787BFFE9E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Linq.Tests", "MongoDB.Linq.Tests\MongoDB.Linq.Tests.csproj", "{870FE8E1-3461-4C79-BF25-9C35E41BF582}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.Driver.Benchmark", "MongoDB.Driver.Benchmark\MongoDB.Driver.Benchmark.csproj", "{5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.GridFS", "MongoDB.GridFS\MongoDB.GridFS.csproj", "{B42DBBF9-0A1F-4749-9787-013BF8D8F435}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDB.GridFS.Tests", "MongoDB.GridFS.Tests\MongoDB.GridFS.Tests.csproj", "{0C293FE9-F670-4FEF-A60F-20F8C978B1CD}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Release|Any CPU = Release|Any CPU - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Debug|Any CPU.Build.0 = Debug|Any CPU - {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Release|Any CPU.ActiveCfg = Release|Any CPU - {0C293FE9-F670-4FEF-A60F-20F8C978B1CD}.Release|Any CPU.Build.0 = Release|Any CPU - {2E48891E-72F9-445D-9A5A-DBA787BFFE9E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2E48891E-72F9-445D-9A5A-DBA787BFFE9E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2E48891E-72F9-445D-9A5A-DBA787BFFE9E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2E48891E-72F9-445D-9A5A-DBA787BFFE9E}.Release|Any CPU.Build.0 = Release|Any CPU - {502F3381-58AA-461B-B9D8-12578A588C61}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {502F3381-58AA-461B-B9D8-12578A588C61}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B}.Release|Any CPU.Build.0 = Release|Any CPU - {870FE8E1-3461-4C79-BF25-9C35E41BF582}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {870FE8E1-3461-4C79-BF25-9C35E41BF582}.Debug|Any CPU.Build.0 = Debug|Any CPU - {870FE8E1-3461-4C79-BF25-9C35E41BF582}.Release|Any CPU.ActiveCfg = Release|Any CPU - {870FE8E1-3461-4C79-BF25-9C35E41BF582}.Release|Any CPU.Build.0 = Release|Any CPU - 
{B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3}.Release|Any CPU.Build.0 = Release|Any CPU - {B42DBBF9-0A1F-4749-9787-013BF8D8F435}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B42DBBF9-0A1F-4749-9787-013BF8D8F435}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B42DBBF9-0A1F-4749-9787-013BF8D8F435}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B42DBBF9-0A1F-4749-9787-013BF8D8F435}.Release|Any CPU.Build.0 = Release|Any CPU - {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02}.Release|Any CPU.Build.0 = Release|Any CPU - {DCBE47DD-59A6-4212-AA4A-142838088B69}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {DCBE47DD-59A6-4212-AA4A-142838088B69}.Release|Any CPU.ActiveCfg = Release|Any CPU - {DCBE47DD-59A6-4212-AA4A-142838088B69}.Debug|Any CPU.Build.0 = Debug|Any CPU - {DCBE47DD-59A6-4212-AA4A-142838088B69}.Release|Any CPU.Build.0 = Release|Any CPU - {502F3381-58AA-461B-B9D8-12578A588C61}.Debug|Any CPU.Build.0 = Debug|Any CPU - {502F3381-58AA-461B-B9D8-12578A588C61}.Release|Any CPU.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(MonoDevelopProperties) = preSolution - StartupItem = MongoDBDriver\MongoDB.Driver.csproj - Policies = $0 - $0.DotNetNamingPolicy = $1 - $1.DirectoryNamespaceAssociation = None - $1.ResourceNamePolicy = FileFormatDefault - $0.TextStylePolicy = $2 - $2.FileWidth = 120 - $2.TabWidth = 4 - $2.inheritsSet = Mono - $2.inheritsScope = text/plain - $2.scope = text/plain - $0.TextStylePolicy = $3 - $3.FileWidth = 120 - $3.NoTabsAfterNonTabs = True - $3.inheritsSet = VisualStudio - $3.inheritsScope = text/plain - $3.scope = text/x-csharp - $0.CSharpFormattingPolicy = $4 - $4.inheritsSet = Mono - $4.inheritsScope = text/x-csharp - $4.scope = text/x-csharp - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/Packages.mdproj b/Packages.mdproj deleted file mode 100644 index c7163d49..00000000 --- a/Packages.mdproj +++ /dev/null @@ -1,56 +0,0 @@ - - - - PackagingProject - 8.0.50727 - 2.0 - {502F3381-58AA-461B-B9D8-12578A588C61} - - - - - - - - - - - MongoDB.Linq,ProgramFiles,MongoDB.Linq.dll.mdb - MongoDB.Linq,ProgramFiles,MongoDB.Driver.dll.mdb - MongoDB.GridFS,ProgramFiles,MongoDB.GridFS.dll.mdb - MongoDB.GridFS,ProgramFiles,MongoDB.Driver.dll.mdb - MongoDB.Driver,ProgramFiles,MongoDB.Driver.dll.mdb - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/README.rst b/README.rst index 41fa7a6d..b913ec9d 100644 --- a/README.rst +++ b/README.rst @@ -41,7 +41,7 @@ Patches ======= Patches are welcome and will likely be accepted. By submitting a patch you assign the copyright to me, Sam Corder. This is necessary to simplify the number of copyright holders should it become necessary that the copyright need to be reassigned or the code relicensed. The code will always be available under an OSI approved license. -A bug fix patch should contain a test case that reproduces the issue along with the actual fix. Try to follow the same style that the code is already in so that things remain clean. 
+A bug fix patch should contain a test case that reproduces the issue along with the actual fix. Try to follow the same style that the code is already in so that things remain clean. Keep your whitespace settings the same as the code to make reading and applying diffs manageable. We use 4 spaces for tabs and Windows line endings. Usage ===== @@ -62,15 +62,22 @@ Getting Help ============ The Google Group mongodb-csharp at (http://groups.google.com/group/mongodb-csharp) is the best place to go. +Reporting Bugs +============== +The bug tracker is the same as the Mongodb bug tracker located at http://jira.mongodb.org + Contributors ============ - Sam Corder (samus) +- Steve Wagner (lanwin) +- Craig Wilson (craiggwilson) - Seth Edwards (Sedward) - Arne Classen (Sdether) -- Steve Wagner (lanwin) - Andrew Rondeau (GWBasic) - Doug Mayer (dougtmayer) - Andrew Kempe - Kevin Smith (codebrulee) - Rashadh (rashadh) - Sergey Bartunov (sbos) +- David O'Hara (davidmohara) +- Tim Raybrun (trayburn) diff --git a/StrongName.snk b/StrongName.snk new file mode 100644 index 00000000..c5dfd6e3 Binary files /dev/null and b/StrongName.snk differ diff --git a/examples/Simple/Main.cs b/examples/Simple/Main.cs index 8539644b..237d2c4a 100644 --- a/examples/Simple/Main.cs +++ b/examples/Simple/Main.cs @@ -1,99 +1,186 @@ -using System; -using System.Configuration; - -using MongoDB.Driver; - -namespace Simple -{ - /// - /// Illustrates some simple operations on the database. - /// Creating a database connection. - /// Remove some documents. - /// Insert some documents - /// Find one document - /// Find several documents and iterate through them. - /// - class MainClass - { - Mongo mongo; - Database simple; - IMongoCollection categories; - - public static void Main (string[] args){ - MainClass main = new MainClass(); - main.Setup(); - main.Run(); - } - - - /// - /// Setup the collection and insert some data into it. - /// - public void Setup(){ - string connstr = ConfigurationManager.AppSettings["simple"]; - if(String.IsNullOrEmpty(connstr)) throw new ArgumentNullException("Connection string not found."); - mongo = new Mongo(connstr); - mongo.Connect(); - simple = mongo["simple"]; - categories = simple["categories"]; - Clean(); - - var names = new string[]{"Bluez", "Jazz", "Classical", "Rock", "Oldies", "Heavy Metal"}; - foreach(string name in names){ - categories.Insert(new Document(){{"name", name}}); - } - } - - public void Clean(){ - categories.Delete(new Document(){{"name", "Jazz"}}); //remove documents with the name Jazz. - categories.Delete(new Document()); //remove everything from the categories collection. - } - - public void Run(){ - var category = categories.FindOne(new Document { { "name", "Bluez" } }); - - Console.WriteLine ("The id findOne" + category["_id"]); - - Document selector = new Document(){{"_id", category["_id"]}}; - - category["name"] = "Bluess"; - //The following will do the same thing. - categories.Update(category); - - Console.WriteLine("Category after one update " + categories.FindOne(selector).ToString()); - - category["name"] = "Blues"; - categories.Update(category, selector); - - Console.WriteLine("Category after two updates " + categories.FindOne(selector).ToString()); - - //Find it by _id that has been converted to a string now. 
- string id = ((Oid)category["_id"]).ToString(); - - Console.WriteLine("Found by string id converted back to Oid"); - Console.WriteLine(categories.FindOne(new Document(){{"_id", id.ToOid()}})); - - //Find(new Document()) is equivalent to FindAll(); - //Specifying the cursor in a using block will close it on the server if we decide not - //to iterate through the whole thing. - using(ICursor all = categories.Find(new Document())){ - foreach(Document doc in all.Documents){ - Console.WriteLine(doc.ToString()); - } - } - - mongo.Disconnect(); - } - - } -} - -public static class OidExtensions -{ - public static Oid ToOid (this string str){ - if (str.Length == 24) - return new Oid (str); - - return new Oid (str.Replace ("\"", "")); - } -} +using System; +using System.Configuration; +using System.Linq; +using MongoDB; +using MongoDB.Configuration; +using MongoDB.Linq; + +namespace Simple +{ + /// + /// Illustrates some simple operations on the database. + /// Creating a database connection. + /// Remove some documents. + /// Insert some documents + /// Find one document + /// Find several documents and iterate through them. + /// + internal class MainClass + { + private IMongoCollection categories; + private Mongo mongo; + private IMongoDatabase simple; + + private class MyClass + { + public string Name { get; set; } + public int Corners { get; set; } + } + + private class SubClass : MyClass + { + public double Ratio { get; set; } + } + + public static void Main(string[] args) + { + var config = new MongoConfigurationBuilder(); + + // COMMENT OUT FROM HERE + config.Mapping(mapping => + { + mapping.DefaultProfile(profile => + { + profile.SubClassesAre(t => t.IsSubclassOf(typeof(MyClass))); + }); + mapping.Map(); + mapping.Map(); + }); + // TO HERE + + config.ConnectionString("Server=127.0.0.1"); + + using (Mongo mongo = new Mongo(config.BuildConfiguration())) + { + mongo.Connect(); + try + { + var db = mongo.GetDatabase("TestDb"); + var collection = db.GetCollection(); + + MyClass square = new MyClass() + { + Corners = 4, + Name = "Square" + }; + + MyClass circle = new MyClass() + { + Corners = 0, + Name = "Circle" + }; + + SubClass sub = new SubClass() + { + Name = "SubClass", + Corners = 6, + Ratio = 3.43 + }; + + collection.Save(square); + collection.Save(circle); + collection.Save(sub); + + var superclass = (from item in db.GetCollection("MyClass").Linq() + where item.Corners > 1 + select item.Corners).ToList(); + + var subclass = (from item in db.GetCollection("MyClass").Linq() + where item.Ratio > 1 + select item.Corners).ToList(); + + Console.WriteLine("Count by LINQ on typed collection: {0}", collection.Linq().Count(x => x.Corners > 1)); + Console.WriteLine("Count by LINQ on typed collection2: {0}", db.GetCollection().Linq().Count(x => x.Corners > 1)); + //Console.WriteLine("Count by LINQ on typed collection3: {0}", db.GetCollection().Count(new { Corners = Op.GreaterThan(1) })); + Console.WriteLine("Count on typed collection: {0}", collection.Count(new { Corners = Op.GreaterThan(1) })); + + var coll = db.GetCollection("MyClass"); + var count = coll.Count(new Document("Corners", Op.GreaterThan(1))); + Console.WriteLine("Count: {0}", count); + Console.ReadKey(); + } + finally + { + mongo.Disconnect(); + } + } + + //var main = new MainClass(); + //main.Setup(); + //main.Run(); + } + + /// + /// Setup the collection and insert some data into it. 
+ /// + public void Setup() + { + var connstr = ConfigurationManager.AppSettings["simple"]; + if(String.IsNullOrEmpty(connstr)) + throw new ArgumentNullException("Connection string not found."); + mongo = new Mongo(connstr); + mongo.Connect(); + simple = mongo["simple"]; + categories = simple.GetCollection("categories"); + Clean(); + + var names = new[] {"Bluez", "Jazz", "Classical", "Rock", "Oldies", "Heavy Metal"}; + foreach(var name in names) + categories.Insert(new Document {{"name", name}}); + } + + public void Clean() + { + categories.Remove(new Document {{"name", "Jazz"}}); //remove documents with the name Jazz. + categories.Remove(new Document()); //remove everything from the categories collection. + } + + public void Run() + { + var category = categories.FindOne(new Document {{"name", "Bluez"}}); + + Console.WriteLine("The id findOne" + category["_id"]); + + var selector = new Document {{"_id", category["_id"]}}; + + category["name"] = "Bluess"; + //The following will do the same thing. + categories.Save(category); + + Console.WriteLine("Category after one update " + categories.FindOne(selector)); + + category["name"] = "Blues"; + categories.Update(category, selector); + + Console.WriteLine("Category after two updates " + categories.FindOne(selector)); + + //Find it by _id that has been converted to a string now. + var id = (category["_id"]).ToString(); + + Console.WriteLine("Found by string id converted back to Oid"); + Console.WriteLine(categories.FindOne(new Document {{"_id", id.ToOid()}})); + + //Find(new Document()) is equivalent to FindAll(); + //Specifying the cursor in a using block will close it on the server if we decide not + //to iterate through the whole thing. + using(var all = categories.Find(new Document())) + { + foreach(var doc in all.Documents) + Console.WriteLine(doc.ToString()); + } + + mongo.Disconnect(); + } + } +} + +public static class OidExtensions +{ + public static Oid ToOid(this string str) + { + if(str.Length == 24) + return new Oid(str); + + return new Oid(str.Replace("\"", "")); + } +} \ No newline at end of file diff --git a/examples/Simple/Simple.csproj b/examples/Simple/Simple.csproj index 692ab0e4..b6aa7839 100644 --- a/examples/Simple/Simple.csproj +++ b/examples/Simple/Simple.csproj @@ -1,15 +1,34 @@  - + Debug AnyCPU - 9.0.21022 + 9.0.30729 2.0 {131BDB5F-5C6F-4AC7-B03E-394B1B75E120} Exe Simple Simple v3.5 + + + 3.5 + + publish\ + true + Disk + false + Foreground + 7 + Days + false + false + true + 0 + 1.0.0.%2a + false + false + true true @@ -19,6 +38,7 @@ DEBUG prompt 4 + AllRules.ruleset none @@ -26,20 +46,22 @@ bin\Release prompt 4 + AllRules.ruleset + - + {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} - MongoDB.Driver + MongoDB @@ -48,4 +70,21 @@ PreserveNewest + + + False + .NET Framework 3.5 SP1 Client Profile + false + + + False + .NET Framework 3.5 SP1 + true + + + False + Windows Installer 3.1 + true + + \ No newline at end of file diff --git a/examples/SimpleVB/Application.vb b/examples/SimpleVB/Application.vb index f866cdc2..32e83e5f 100644 --- a/examples/SimpleVB/Application.vb +++ b/examples/SimpleVB/Application.vb @@ -1,7 +1,8 @@ imports System imports System.Configuration +Imports Microsoft.VisualBasic -imports MongoDB.Driver +Imports MongoDB Namespace Simple @@ -15,11 +16,12 @@ Namespace Simple ''' Public Class Application Private mongo as Mongo - Private simple as Database - Private categories as IMongoCollection + Private simple as IMongoDatabase + Private categories As IMongoCollection(Of Document) Public Shared Sub Main() Dim app 
As New Application() + app.Setup() app.Run() Console.WriteLine("Press any key to continue...") @@ -35,30 +37,30 @@ Namespace Simple mongo = new Mongo(connstr) mongo.Connect() simple = mongo("simple") - categories = simple("categories") + categories = simple.GetCollection(Of Document)("categories") Clean() Dim names() As String = {"Bluez", "Jazz", "Classical", "Rock", "Oldies", "Heavy Metal"} For Each name As string In names - categories.Insert(new Document().Append("name", name)) + categories.Insert(New Document().Add("name", name)) Next End Sub Public Sub Clean() - categories.Delete(new Document().Append("name", "Jazz")) 'remove documents with the name Jazz. - categories.Delete(new Document()) 'remove everything from the categories collection. + categories.Remove(New Document().Add("name", "Jazz")) 'remove documents with the name Jazz. + categories.Remove(New Document()) 'remove everything from the categories collection. End Sub Public Sub Run() - Dim category As Document = categories.FindOne(new Document().Append("name", "Bluez")) + Dim category As Document = categories.FindOne(New Document().Add("name", "Bluez")) Console.WriteLine ("The id findOne" & category("_id").ToString()) - Dim selector As Document = New Document().Append("_id", category("_id")) + Dim selector As Document = New Document().Add("_id", category("_id")) category("name") = "Bluess" 'The following will do the same thing. - categories.Update(category) + categories.Save(category) Console.WriteLine("Category after one update " + categories.FindOne(selector).ToString()) @@ -71,12 +73,12 @@ Namespace Simple Dim id As String = CType(category("_id"),Oid).ToString() Console.WriteLine("Found by string id converted back to Oid") - Console.WriteLine(categories.FindOne(new Document().Append("_id", new Oid(id.Replace("""", "")))).ToString()) + Console.WriteLine(categories.FindOne(New Document().Add("_id", New Oid(id.Replace("""", "")))).ToString()) 'Find(new Document()) is equivalent to FindAll() 'Specifying the cursor in a using block will close it on the server if we decide not 'to iterate through the whole thing. 
- Dim all As ICursor = categories.Find(New Document()) + Dim all As ICursor(Of Document) = categories.Find(New Document()) Try For Each doc As Document In all.Documents Console.WriteLine(doc.ToString()) diff --git a/examples/SimpleVB/SimpleVB.vbproj b/examples/SimpleVB/SimpleVB.vbproj index 3c1ab5bd..56a0da12 100644 --- a/examples/SimpleVB/SimpleVB.vbproj +++ b/examples/SimpleVB/SimpleVB.vbproj @@ -1,9 +1,9 @@  - + Debug AnyCPU - 9.0.21022 + 9.0.30729 2.0 {1BAAE3D8-7720-4AA5-9335-E59824E7B667} Exe @@ -12,6 +12,25 @@ On SimpleVB v3.5 + + + 3.5 + + publish\ + true + Disk + false + Foreground + 7 + Days + false + false + true + 0 + 1.0.0.%2a + false + false + true true @@ -20,6 +39,8 @@ false false + 42353,42354,42355 + AllRules.ruleset bin\Release @@ -27,11 +48,15 @@ false false + 42353,42354,42355 + AllRules.ruleset + + @@ -41,10 +66,30 @@ - + {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} - MongoDB.Driver + MongoDB + + + False + .NET Framework 3.5 SP1 Client Profile + false + + + False + .NET Framework 3.5 SP1 + true + + + False + Windows Installer 3.1 + true + + + + + \ No newline at end of file diff --git a/MongoDB.GridFS.Tests/GridFileInfoTest.cs b/source/MongoDB.GridFS.Tests/GridFileInfoTest.cs similarity index 95% rename from MongoDB.GridFS.Tests/GridFileInfoTest.cs rename to source/MongoDB.GridFS.Tests/GridFileInfoTest.cs index 81373e1c..2c3617da 100644 --- a/MongoDB.GridFS.Tests/GridFileInfoTest.cs +++ b/source/MongoDB.GridFS.Tests/GridFileInfoTest.cs @@ -4,7 +4,7 @@ using NUnit.Framework; -using MongoDB.Driver; +using MongoDB; namespace MongoDB.GridFS { @@ -68,9 +68,11 @@ public void TestDelete(){ String filename = "gfi-delete.txt"; GridFile gf = new GridFile(DB,"gfdelete"); GridFileInfo gfi = new GridFileInfo(DB,"gfdelete", filename); + var id = gfi.Id; GridFileStream gfs = gfi.Create(); //TODO Expand Test to make sure that chunks for the file got deleted too. 
gfi.Delete(); Assert.IsFalse(gf.Exists(filename), "File should have been deleted."); + Assert.IsTrue(0 == gf.Chunks.Count(new Document("_id", id))); } [Test] diff --git a/MongoDB.GridFS.Tests/GridFileStreamTest.cs b/source/MongoDB.GridFS.Tests/GridFileStreamTest.cs old mode 100755 new mode 100644 similarity index 96% rename from MongoDB.GridFS.Tests/GridFileStreamTest.cs rename to source/MongoDB.GridFS.Tests/GridFileStreamTest.cs index 7532005b..307709eb --- a/MongoDB.GridFS.Tests/GridFileStreamTest.cs +++ b/source/MongoDB.GridFS.Tests/GridFileStreamTest.cs @@ -3,7 +3,7 @@ using NUnit.Framework; -using MongoDB.Driver; +using MongoDB; namespace MongoDB.GridFS { @@ -343,7 +343,7 @@ public void TestSeekingBeyondEOF(){ protected Document GrabChunk(Object fileid, int chunk){ - return DB[filesystem + ".chunks"].FindOne(new Document().Append("files_id", fileid).Append("n", chunk)); + return DB[filesystem + ".chunks"].FindOne(new Document().Add("files_id", fileid).Add("n", chunk)); } protected Object CreateDummyFile(string filename, int size, int chunksize, int initialOffset){ diff --git a/MongoDB.GridFS.Tests/GridFileTest.cs b/source/MongoDB.GridFS.Tests/GridFileTest.cs similarity index 68% rename from MongoDB.GridFS.Tests/GridFileTest.cs rename to source/MongoDB.GridFS.Tests/GridFileTest.cs index 32c4c786..6c25ee1c 100644 --- a/MongoDB.GridFS.Tests/GridFileTest.cs +++ b/source/MongoDB.GridFS.Tests/GridFileTest.cs @@ -3,7 +3,7 @@ using NUnit.Framework; -using MongoDB.Driver; +using MongoDB; namespace MongoDB.GridFS { @@ -54,6 +54,23 @@ public void TestModeCreateNew(){ tw.Close(); } Assert.AreEqual(1, CountChunks("gfcreate", id)); - } + } + + [Test] + public void TestDeleteRemovesChunks(){ + Object id; + string filename = "deletebyname.txt"; + string fs = "fs"; + GridFile gf = new GridFile(DB,fs); + using(GridFileStream gfs = gf.Create(filename, FileMode.CreateNew)){ + id = gfs.GridFileInfo.Id; + TextWriter tw = new StreamWriter(gfs); + tw.WriteLine("test"); + tw.Close(); + } + Assert.AreEqual(1, CountChunks(fs, id), "Chunks not found"); + gf.Delete("deletebyname.txt"); + Assert.AreEqual(0, CountChunks(fs, id), "Chunks found"); + } } } diff --git a/MongoDB.GridFS.Tests/GridTestBase.cs b/source/MongoDB.GridFS.Tests/GridTestBase.cs old mode 100755 new mode 100644 similarity index 92% rename from MongoDB.GridFS.Tests/GridTestBase.cs rename to source/MongoDB.GridFS.Tests/GridTestBase.cs index 221124ab..3c95a72c --- a/MongoDB.GridFS.Tests/GridTestBase.cs +++ b/source/MongoDB.GridFS.Tests/GridTestBase.cs @@ -4,7 +4,7 @@ using NUnit.Framework; -using MongoDB.Driver; +using MongoDB; namespace MongoDB.GridFS { @@ -31,7 +31,7 @@ public override string TestCollections { } public long CountChunks(string filesystem, Object fileid){ - return DB[filesystem + ".chunks"].Count(new Document().Append("files_id", fileid)); + return DB[filesystem + ".chunks"].Count(new Document().Add("files_id", fileid)); } } diff --git a/source/MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.csproj b/source/MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.csproj new file mode 100644 index 00000000..f07f774f --- /dev/null +++ b/source/MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.csproj @@ -0,0 +1,110 @@ + + + + Debug + AnyCPU + 9.0.30729 + 2.0 + {0C293FE9-F670-4FEF-A60F-20F8C978B1CD} + Library + MongoDB.GridFS.Tests + v3.5 + MongoDB.GridFS.Tests + true + ..\..\StrongName.snk + + + + + 3.5 + publish\ + true + Disk + false + Foreground + 7 + Days + false + false + true + 0 + 1.0.0.%2a + false + false + true + + + true + full + false + bin\Debug + DEBUG 
+ prompt + 4 + false + AllRules.ruleset + + + none + false + bin\Release + prompt + 4 + false + AllRules.ruleset + + + + + + False + ..\..\redist\nunit.framework.dll + + + + + + + + + StrongName.snk + + + App.config + PreserveNewest + + + + + + False + .NET Framework 3.5 SP1 Client Profile + false + + + False + .NET Framework 3.5 SP1 + true + + + False + Windows Installer 3.1 + true + + + + + {B42DBBF9-0A1F-4749-9787-013BF8D8F435} + MongoDB.GridFS + + + {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02} + MongoDB.Tests + + + {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} + MongoDB + + + + \ No newline at end of file diff --git a/source/MongoDB.GridFS/AssemblyInfo.cs b/source/MongoDB.GridFS/AssemblyInfo.cs new file mode 100644 index 00000000..39b333a3 --- /dev/null +++ b/source/MongoDB.GridFS/AssemblyInfo.cs @@ -0,0 +1,11 @@ +using System.Reflection; +using System.Runtime.CompilerServices; + +// Information about this assembly is defined by the following attributes. +// Change them to the values specific to your project. + +[assembly: AssemblyTitle("MongoDB.GridFS")] +[assembly: AssemblyDescription("GridFS implementation for MongoDB-CSharp driver")] +[assembly: AssemblyProduct("MongoDB-CSharp")] + +[assembly: InternalsVisibleTo("MongoDB.GridFS.Tests, PublicKey=0024000004800000940000000602000000240000525341310004000001000100ed9e936c4563336be2e14ca802ea727ff49cad3bb1c0b287beed2a9b5eb823c4c44becc80be4bb11dcd7e49d5d6171f68b488853dcbdeb3152ea3db95ba13a70855a715ee21ac76b67f50bcbc93f2e29e409530a00b98fa79b06ac008dd1f4f3582ba6746af3d218b43b70a63254b094be1a2d493590837273f357fc56b2a7a0")] diff --git a/source/MongoDB.GridFS/GridException.cs b/source/MongoDB.GridFS/GridException.cs new file mode 100644 index 00000000..bec61639 --- /dev/null +++ b/source/MongoDB.GridFS/GridException.cs @@ -0,0 +1,32 @@ +using System; + +namespace MongoDB.GridFS +{ + /// + /// + /// + public class MongoGridFSException : Exception + { + private string filename; + /// + /// Gets the filename. + /// + /// The filename. + public string Filename + { + get { return filename; } + } + + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The filename. + /// The inner. + public MongoGridFSException(string message, string filename, Exception inner) + : base(message, inner) + { + this.filename = filename; + } + } +} diff --git a/MongoDB.GridFS/GridFile.cs b/source/MongoDB.GridFS/GridFile.cs old mode 100755 new mode 100644 similarity index 54% rename from MongoDB.GridFS/GridFile.cs rename to source/MongoDB.GridFS/GridFile.cs index a147cd46..835758dd --- a/MongoDB.GridFS/GridFile.cs +++ b/source/MongoDB.GridFS/GridFile.cs @@ -1,46 +1,83 @@ using System; using System.IO; -using MongoDB.Driver; +using MongoDB; namespace MongoDB.GridFS { + /// + /// + /// public class GridFile{ - private Database db; + private IMongoDatabase db; private string name; + /// + /// Gets the name. + /// + /// The name. public string Name { get { return name; } } - + private IMongoCollection files; - public IMongoCollection Files{ + /// + /// Gets the files. + /// + /// The files. + public IMongoCollection Files + { get { return this.files; } } private IMongoCollection chunks; - public IMongoCollection Chunks{ + /// + /// Gets the chunks. + /// + /// The chunks. + public IMongoCollection Chunks + { get { return this.chunks; } - } - - public GridFile(Database db):this(db,"fs"){} + } - public GridFile(Database db, string bucket){ + /// + /// Initializes a new instance of the class. + /// + /// The db. 
+ public GridFile(IMongoDatabase db):this(db,"fs"){} + + /// + /// Initializes a new instance of the class. + /// + /// The db. + /// The bucket. + public GridFile(IMongoDatabase db, string bucket){ this.db = db; this.files = db[bucket + ".files"]; this.chunks = db[bucket + ".chunks"]; - this.chunks.MetaData.CreateIndex(new Document().Append("files_id", 1).Append("n", 1),true); + this.chunks.Metadata.CreateIndex(new Document().Add("files_id", 1).Add("n", 1), true); + this.files.Metadata.CreateIndex(new Document().Add("filename", 1).Add("n", 1), false); this.name = bucket; } - + + /// + /// Lists the files. + /// + /// public ICursor ListFiles(){ return this.ListFiles(new Document()); } - - public ICursor ListFiles(Document query){ - return this.files.Find(new Document().Append("query",query) - .Append("orderby", new Document() - .Append("filename", 1))); + + /// + /// Lists the files. + /// + /// The query. + /// + public ICursor ListFiles(Document query) + { + return this.files.Find(new Document().Add("query", query) + .Add("orderby", new Document() + .Add("filename", 1))); } /// @@ -52,8 +89,8 @@ public ICursor ListFiles(Document query){ public void Copy(String src, String dest){ if(Exists(src) == false) throw new FileNotFoundException("Not found in the database.", src); if(Exists(dest) == true) throw new IOException("Destination file already exists."); - - Document scope = new Document().Append("bucket", this.name).Append("srcfile", src).Append("destfile",dest); + + Document scope = new Document().Add("bucket", this.name).Add("srcfile", src).Add("destfile", dest); String func ="function(){\n" + //" print(\"copying \" + srcfile);\n" + " var files = db[bucket + \".files\"];\n" + @@ -76,18 +113,36 @@ public void Copy(String src, String dest){ " }\n" + " return false;\n" + "}"; - Document result = db.Eval(func,scope); + db.Eval(func,scope); } #region Create + /// + /// Creates the specified filename. + /// + /// The filename. + /// public GridFileStream Create(String filename){ return Create(filename, FileMode.Create); } - + + /// + /// Creates the specified filename. + /// + /// The filename. + /// The mode. + /// public GridFileStream Create(String filename, FileMode mode){ return Create(filename,mode,FileAccess.ReadWrite); } - + + /// + /// Creates the specified filename. + /// + /// The filename. + /// The mode. + /// The access. + /// public GridFileStream Create(String filename, FileMode mode, FileAccess access){ //Create is delegated to a GridFileInfo because the stream needs access to the gfi and it //is easier to do it this way and only write the implementation once. @@ -98,15 +153,32 @@ public GridFileStream Create(String filename, FileMode mode, FileAccess access){ #endregion #region Opens + /// + /// Opens the specified filename. + /// + /// The filename. + /// The mode. + /// The access. + /// public GridFileStream Open(string filename, FileMode mode, FileAccess access){ return new GridFileInfo(this.db, this.name, filename).Open(mode, access); } + /// + /// Opens the read. + /// + /// The filename. + /// public GridFileStream OpenRead(String filename){ GridFileInfo gfi = new GridFileInfo(this.db, this.name, filename); return gfi.OpenRead(); } + /// + /// Opens the write. + /// + /// The filename. + /// public GridFileStream OpenWrite(String filename){ GridFileInfo gfi = new GridFileInfo(this.db, this.name, filename); return gfi.OpenWrite(); @@ -120,15 +192,15 @@ public GridFileStream OpenWrite(String filename){ /// Permanently removes a file from the database. 
/// public void Delete(Object id){ - files.Delete(new Document().Append("_id",id)); - chunks.Delete(new Document().Append("files_id",id)); + files.Remove(new Document().Add("_id", id)); + chunks.Remove(new Document().Add("files_id", id)); } /// /// Permanently removes a file from the database. /// public void Delete(String filename){ - files.Delete(new Document().Append("filename",filename)); + this.Delete(new Document().Add("filename", filename)); } /// @@ -146,23 +218,33 @@ public void Delete(Document query ){ /// Gets a value indicating whether the file exists. /// public Boolean Exists(string name){ - return this.files.FindOne(new Document().Append("filename",name)) != null; + return this.files.FindOne(new Document().Add("filename", name)) != null; } /// /// Gets a value indicating whether the file exists. /// public Boolean Exists(Object id){ - return this.files.FindOne(new Document().Append("_id",id)) != null; + return this.files.FindOne(new Document().Add("_id", id)) != null; } #endregion #region Move + /// + /// Moves the specified SRC. + /// + /// The SRC. + /// The dest. public void Move(String src, String dest){ - this.files.Update(new Document().Append("$set", new Document().Append("filename",dest)), new Document().Append("filename", src)); + this.files.Update(new Document().Add("$set", new Document().Add("filename", dest)), new Document().Add("filename", src)); } - + + /// + /// Moves the specified id. + /// + /// The id. + /// The dest. public void Move(Object id, String dest){ - this.files.Update(new Document().Append("$set", new Document().Append("filename",dest)), new Document().Append("_id", id)); + this.files.Update(new Document().Add("$set", new Document().Add("filename", dest)), new Document().Add("_id", id)); } #endregion diff --git a/MongoDB.GridFS/GridFileInfo.cs b/source/MongoDB.GridFS/GridFileInfo.cs similarity index 70% rename from MongoDB.GridFS/GridFileInfo.cs rename to source/MongoDB.GridFS/GridFileInfo.cs index 076b3906..39aad7eb 100644 --- a/MongoDB.GridFS/GridFileInfo.cs +++ b/source/MongoDB.GridFS/GridFileInfo.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; using System.IO; -using MongoDB.Driver; +using MongoDB; namespace MongoDB.GridFS { @@ -18,24 +18,36 @@ public class GridFileInfo private const string DEFAULT_CONTENT_TYPE = "text/plain"; private GridFile gridFile; - private Database db; + private IMongoDatabase db; private string bucket; #region "filedata properties" private Document filedata = new Document(); + /// + /// Gets or sets the id. + /// + /// The id. public Object Id{ get { return filedata["_id"]; } set { filedata["_id"] = value; } } + /// + /// Gets or sets the name of the file. + /// + /// The name of the file. public string FileName { get { return (String)filedata["filename"]; } set { filedata["filename"] = value; } } + /// + /// Gets or sets the type of the content. + /// + /// The type of the content. public string ContentType{ get { return (String)filedata["contentType"]; } set { filedata["contentType"] = value; } @@ -50,41 +62,66 @@ public long Length{ set { filedata["length"] = value; } } + /// + /// Gets or sets the aliases. + /// + /// The aliases. 
public IList Aliases{ get { - if(filedata.Contains("aliases") == false || filedata["aliases"] == null){ + if(filedata.ContainsKey("aliases") == false || filedata["aliases"] == null){ return null; } if(filedata["aliases"] is IList){ return (List)filedata["aliases"]; - }else{ - return null; } + return new List(); } set { filedata["aliases"] = value; } } + /// + /// Gets or sets the size of the chunk. + /// + /// The size of the chunk. public int ChunkSize{ get { return Convert.ToInt32(filedata["chunkSize"]); } set { filedata["chunkSize"] = value; } } + /// + /// Gets the metadata. + /// + /// The metadata. public Object Metadata{ get { return (Document)filedata["metadata"]; } } + /// + /// Gets or sets the upload date. + /// + /// The upload date. public DateTime? UploadDate{ get { return Convert.ToDateTime(filedata["uploadDate"]); } set { filedata["uploadDate"] = value; } } + /// + /// Gets or sets the MD5. + /// + /// The MD5. public string Md5{ get { return (String)filedata["md5"]; } set { filedata["md5"] = value; } } #endregion - public GridFileInfo(Database db, string bucket, string filename){ + /// + /// Initializes a new instance of the class. + /// + /// The db. + /// The bucket. + /// The filename. + public GridFileInfo(IMongoDatabase db, string bucket, string filename){ this.db = db; this.bucket = bucket; this.gridFile = new GridFile(db,bucket); @@ -92,7 +129,12 @@ public GridFileInfo(Database db, string bucket, string filename){ if(gridFile.Exists(filename)) this.LoadFileData(); } - public GridFileInfo(Database db, string filename){ + /// + /// Initializes a new instance of the class. + /// + /// The db. + /// The filename. + public GridFileInfo(MongoDatabase db, string filename){ this.db = db; this.bucket = "fs"; this.gridFile = new GridFile(db); @@ -112,15 +154,27 @@ private void SetFileDataDefaults(string filename){ /// /// Creates the file named FileName and returns the GridFileStream /// - /// If the file already exists + /// + /// If the file already exists public GridFileStream Create(){ return Create(FileMode.CreateNew); } - + + /// + /// Creates the specified mode. + /// + /// The mode. + /// public GridFileStream Create(FileMode mode){ return Create(mode,FileAccess.ReadWrite); - } - + } + + /// + /// Creates the specified mode. + /// + /// The mode. + /// The access. + /// public GridFileStream Create(FileMode mode, FileAccess access){ switch (mode) { case FileMode.CreateNew: @@ -157,15 +211,20 @@ public GridFileStream OpenRead(){ public GridFileStream OpenWrite(){ return this.Open(FileMode.Open, FileAccess.Write); } - + + /// + /// Opens the specified mode. + /// + /// The mode. + /// The access. + /// public GridFileStream Open(FileMode mode, FileAccess access){ switch (mode) { case FileMode.Create: if(gridFile.Exists(this.FileName) == true){ return this.Open(FileMode.Truncate, access); - }else{ - return this.Create(FileMode.CreateNew, access); } + return this.Create(FileMode.CreateNew, access); case FileMode.CreateNew: return this.Create(mode, access); case FileMode.Open: @@ -215,19 +274,24 @@ public Boolean Exists{ return this.gridFile.Exists(this.FileName); } } - + /// /// Deletes all data in a file and sets the length to 0. 
/// public void Truncate(){ - if(filedata.Contains("_id") == false) return; - this.gridFile.Chunks.Delete(new Document().Append("files_id", filedata["_id"])); + if(filedata.ContainsKey("_id") == false) + return; + this.gridFile.Chunks.Remove(new Document().Add("files_id", filedata["_id"])); this.Length = 0; - this.gridFile.Files.Update(filedata); + this.gridFile.Files.Save(filedata); } + /// + /// Calcs the M d5. + /// + /// public string CalcMD5(){ - Document doc = this.db.SendCommand(new Document().Append("filemd5", this.Id).Append("root",this.bucket)); + Document doc = this.db.SendCommand(new Document().Add("filemd5", this.Id).Add("root", this.bucket)); return (String)doc["md5"]; } @@ -254,18 +318,28 @@ public void Refresh(){ } private void LoadFileData(){ - Document doc = this.gridFile.Files.FindOne(new Document().Append("filename",this.FileName)); + Document doc = this.gridFile.Files.FindOne(new Document().Add("filename", this.FileName)); if(doc != null){ filedata = doc; }else{ throw new DirectoryNotFoundException(this.gridFile.Name + Path.VolumeSeparatorChar + this.FileName); } } - + + /// + /// Toes the document. + /// + /// public Document ToDocument(){ return this.filedata; } - + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// public override string ToString(){ return filedata.ToString(); } diff --git a/source/MongoDB.GridFS/GridFileStream.cs b/source/MongoDB.GridFS/GridFileStream.cs new file mode 100644 index 00000000..68199806 --- /dev/null +++ b/source/MongoDB.GridFS/GridFileStream.cs @@ -0,0 +1,451 @@ +using System; +using System.IO; + +using MongoDB; + +namespace MongoDB.GridFS +{ + /// + /// Stream for reading and writing to a file in GridFS. + /// + /// + /// When using the stream for random io it is possible to produce chunks in the begining and middle of the + /// file that are not full size followed by other chunks that are full size. This only affects the md5 sum + /// that is calculated on the file on close. Because of this do not rely on the md5 sum of a file when doing + /// random io. Writing to the stream sequentially works fine and will produce a consistent md5. + /// + public class GridFileStream : Stream + { + + private IMongoCollection files; + private IMongoCollection chunks; + private Document chunk; + private bool chunkDirty; + private long chunkLower = -1; + private long chunkUpper = -1; + + private byte[] buffer; + private byte[] blankBuffer; + private int buffPosition; + private int highestBuffPosition; + private long highestPosWritten; + + + #region Properties + private GridFileInfo gridFileInfo; + /// + /// Gets or sets the grid file info. + /// + /// The grid file info. + public GridFileInfo GridFileInfo { + get { return gridFileInfo; } + set { gridFileInfo = value; } + } + + private bool canRead; + /// + /// When overridden in a derived class, gets a value indicating whether the current stream supports reading. + /// + /// + /// true if the stream supports reading; otherwise, false. + /// + public override bool CanRead { + get { return canRead; } + } + + private bool canWrite; + /// + /// When overridden in a derived class, gets a value indicating whether the current stream supports writing. + /// + /// + /// true if the stream supports writing; otherwise, false. + /// + public override bool CanWrite { + get { return canRead; } + } + + /// + /// When overridden in a derived class, gets a value indicating whether the current stream supports seeking. 
+ /// + /// + /// true if the stream supports seeking; otherwise, false. + /// + public override bool CanSeek { + get { return true; } + } + + /// + /// When overridden in a derived class, gets the length in bytes of the stream. + /// + /// + /// + /// A long value representing the length of the stream in bytes. + /// + /// + /// A class derived from Stream does not support seeking. + /// + /// + /// Methods were called after the stream was closed. + /// + public override long Length { + get { return gridFileInfo.Length; } + } + + private long position; + /// + /// When overridden in a derived class, gets or sets the position within the current stream. + /// + /// + /// + /// The current position within the stream. + /// + /// + /// An I/O error occurs. + /// + /// + /// The stream does not support seeking. + /// + /// + /// Methods were called after the stream was closed. + /// + public override long Position { + get { return position; } + set { this.Seek (value, SeekOrigin.Begin); } + } + #endregion + + /// + /// Initializes a new instance of the class. + /// + /// The gridfileinfo. + /// The files. + /// The chunks. + /// The access. + public GridFileStream (GridFileInfo gridfileinfo, IMongoCollection files, + IMongoCollection chunks, FileAccess access) + { + switch (access) { + case FileAccess.Read: + canRead = true; + break; + case FileAccess.ReadWrite: + canRead = true; + canWrite = true; + break; + case FileAccess.Write: + canWrite = true; + break; + } + this.gridFileInfo = gridfileinfo; + this.files = files; + this.chunks = chunks; + this.buffer = new byte[gridFileInfo.ChunkSize]; + this.blankBuffer = new byte[gridFileInfo.ChunkSize]; + this.highestPosWritten = this.gridFileInfo.Length; + this.MoveTo (0); + } + + /// + /// Reads data from the stream into the specified array. It will fill the array in starting at offset and + /// adding count bytes returning the number of bytes read from the stream. + /// + public override int Read (byte[] array, int offset, int count) + { + ValidateReadState (array, offset, count); + + int bytesLeftToRead = count; + int bytesRead = 0; + while (bytesLeftToRead > 0 && this.position < this.Length) { + int buffAvailable = buffer.Length - buffPosition; + int readCount = 0; + if (buffAvailable > bytesLeftToRead) { + readCount = bytesLeftToRead; + } else { + readCount = buffAvailable; + } + if (readCount + position > highestPosWritten) { + //adjust readcount so that we don't read past the end of file. + readCount = readCount - (int)(readCount + position - highestPosWritten); + } + Array.Copy (buffer, buffPosition, array, offset, readCount); + buffPosition += readCount; + bytesLeftToRead -= readCount; + bytesRead += readCount; + offset += readCount; + MoveTo (position + readCount); + } + return bytesRead; + } + + private void ValidateReadState (byte[] array, int offset, int count) + { + if (array == null) { + throw new ArgumentNullException ("array", new Exception ("array is null")); + } else if (offset < 0) { + throw new ArgumentOutOfRangeException ("offset", new Exception ("offset is negative")); + } else if (count < 0) { + throw new ArgumentOutOfRangeException ("count", new Exception ("count is negative")); + } else if ((array.Length - offset) < count) { + throw new MongoGridFSException ("Invalid count argument", gridFileInfo.FileName, null); + } else if (!canRead) { + throw new MongoGridFSException ("Reading this file is not supported", gridFileInfo.FileName, null); + } + } + + /// + /// Copies from the source array into the grid file. 
+ /// + /// The array. + /// A The offset within the source array. + /// A The number of bytes from within the source array to copy. + public override void Write (byte[] array, int offset, int count) + { + ValidateWriteState (array, offset, count); + + int bytesLeftToWrite = count; + while (bytesLeftToWrite > 0) { + int buffAvailable = buffer.Length - buffPosition; + int writeCount = 0; + if (buffAvailable > bytesLeftToWrite) { + writeCount = bytesLeftToWrite; + } else { + writeCount = buffAvailable; + } + Array.Copy (array, offset, buffer, buffPosition, writeCount); + chunkDirty = true; + buffPosition += writeCount; + offset += writeCount; + bytesLeftToWrite -= writeCount; + MoveTo (position + writeCount); + highestPosWritten = Math.Max (highestPosWritten, position); + } + } + + private void ValidateWriteState (byte[] array, int offset, int count) + { + if (array == null) { + throw new ArgumentNullException ("array", new Exception ("array is null")); + } else if (offset < 0) { + throw new ArgumentOutOfRangeException ("offset", new Exception ("offset is negative")); + } else if (count < 0) { + throw new ArgumentOutOfRangeException ("count", new Exception ("count is negative")); + } else if ((array.Length - offset) < count) { + throw new MongoGridFSException ("Invalid count argument", gridFileInfo.FileName, null); + } else if (!canWrite) { + throw new System.NotSupportedException ("Stream does not support writing."); + } + } + + + /// + /// Flushes any changes to current chunk to the database. It can be called in client code at any time or it + /// will automatically be called on Close() and when the stream position moves off the bounds of the current + /// chunk. + /// + /// + /// An I/O error occurs. + /// + public override void Flush () + { + if (chunkDirty == false) + return; + //avoid a copy if possible. + if (highestBuffPosition == buffer.Length) { + chunk["data"] = new Binary (buffer); + } else { + byte[] data = new byte[highestBuffPosition]; + Array.Copy (buffer, data, highestBuffPosition); + chunk["data"] = new Binary (data); + } + + + if(chunk.ContainsKey("_id")) + { + chunks.Save (chunk); + } else { + chunks.Insert (chunk); + } + this.gridFileInfo.Length = highestPosWritten; + } + + /// + /// Seek to any location in the stream. Seeking past the end of the file is allowed. Any writes to that + /// location will cause the file to grow to that size. Any holes that may be created from the seek will + /// be zero filled on close. + /// + public override long Seek (long offset, SeekOrigin origin) + { + if ((origin < SeekOrigin.Begin) || (origin > SeekOrigin.End)) { + throw new ArgumentException ("Invalid Seek Origin"); + } + + switch (origin) { + case SeekOrigin.Begin: + if (offset < 0) { + throw new ArgumentException ("Attempted seeking before the begining of the stream"); + } else { + MoveTo (offset); + } + break; + case SeekOrigin.Current: + MoveTo (position + offset); + break; + case SeekOrigin.End: + if (offset <= 0) { + throw new ArgumentException ("Attempted seeking after the end of the stream"); + } + MoveTo (this.Length - offset); + break; + } + return position; + } + + /// + /// Sets the length of this stream to the given value. 
+ /// + /// + /// A + /// + public override void SetLength (long value) + { + if (value < 0) + throw new ArgumentOutOfRangeException ("length"); + if (this.CanSeek == false || this.CanWrite == false) { + throw new NotSupportedException ("The stream does not support both writing and seeking."); + } + + if (value < highestPosWritten) { + TruncateAfter (value); + } else { + this.Seek (value, SeekOrigin.Begin); + } + chunkDirty = true; + this.gridFileInfo.Length = value; + highestPosWritten = value; + + } + + /// + /// Close the stream and flush any changes to the database. + /// + public override void Close () + { + this.Flush (); + this.gridFileInfo.Length = highestPosWritten; + EnsureNoHoles (); + string md5 = gridFileInfo.CalcMD5 (); + gridFileInfo.Md5 = md5; + this.files.Save (gridFileInfo.ToDocument ()); + base.Close (); + } + + /// + /// Moves the current position to the new position. If this causes a new chunk to need to be loaded it will take + /// care of flushing the buffer and loading a new chunk. + /// + /// + /// A designating where to go to. + /// + private void MoveTo (long position) + { + this.position = position; + int chunkSize = this.gridFileInfo.ChunkSize; + bool chunkInRange = (chunk != null && position >= chunkLower && position < chunkUpper); + if (chunkInRange == false) { + if (chunk != null && chunkDirty) { + highestBuffPosition = Math.Max (highestBuffPosition, buffPosition); + this.Flush (); + } + int chunknum = (int)Math.Floor ((double)(position / chunkSize)); + Array.Copy (blankBuffer, buffer, buffer.Length); + LoadOrCreateChunk (chunknum); + chunkDirty = false; + chunkLower = chunknum * chunkSize; + chunkUpper = chunkLower + chunkSize; + } + buffPosition = (int)(position % chunkSize); + highestBuffPosition = Math.Max (highestBuffPosition, buffPosition); + + } + + /// + /// Loads a chunk from the chunks collection if it exists. Otherwise it creates a blank chunk Document. + /// + /// + private void LoadOrCreateChunk (int num) + { + Object fid = this.GridFileInfo.Id; + Document spec = new Document().Add("files_id", fid).Add("n", num); + chunk = this.chunks.FindOne (spec); + if (chunk == null) { + chunk = spec; + highestBuffPosition = 0; + } else { + Binary b = (Binary)chunk["data"]; + highestBuffPosition = b.Bytes.Length; + Array.Copy (b.Bytes, buffer, highestBuffPosition); + } + } + + + /// + /// Deletes all chunks after the specified position and clears out any extra bytes if the position doesn't fall on + /// a chunk boundry. + /// + private void TruncateAfter (long value) + { + int chunknum = CalcChunkNum (value); + Document spec = new Document().Add("files_id", this.gridFileInfo.Id).Add("n", new Document().Add("$gt", chunknum)); + this.chunks.Remove(spec); + this.MoveTo (value); + Array.Copy (blankBuffer, 0, buffer, buffPosition, buffer.Length - buffPosition); + highestBuffPosition = buffPosition; + } + + private int CalcChunkNum (long position) + { + int chunkSize = this.gridFileInfo.ChunkSize; + return (int)Math.Floor ((double)(position / chunkSize)); + } + + /// + /// Makes sure that at least a skelton chunk exists for all numbers. If not the MD5 calculation will fail on a sparse file. 
+ /// + private void EnsureNoHoles () + { + int highChunk = CalcChunkNum (this.GridFileInfo.Length); + Document query = new Document().Add("files_id", this.GridFileInfo.Id).Add("n", new Document().Add("$lte", highChunk)); + Document sort = new Document().Add("n", 1); + Document fields = new Document().Add("_id", 1).Add("n", 1); + + Binary data = new Binary (this.blankBuffer); + int i = 0; + using(ICursor cur = chunks.Find(new Document().Add("query", query).Add("sort", sort), 0, 0, fields)){ + foreach (Document doc in cur.Documents) { + int n = Convert.ToInt32 (doc["n"]); + if (i < n) { + while (i < n) { + chunks.Insert (new Document ().Add("files_id", this.gridFileInfo.Id).Add("n", i).Add("data", data)); + i++; + } + } else { + i++; + } + } + } + + } + + /// + /// Releases the unmanaged resources used by the and optionally releases the managed resources. + /// + /// true to release both managed and unmanaged resources; false to release only unmanaged resources. + protected override void Dispose (bool disposing) + { + this.canRead = false; + this.canWrite = false; + + base.Dispose (disposing); + } + } +} diff --git a/source/MongoDB.GridFS/MongoDB.GridFS.csproj b/source/MongoDB.GridFS/MongoDB.GridFS.csproj new file mode 100644 index 00000000..6f83193c --- /dev/null +++ b/source/MongoDB.GridFS/MongoDB.GridFS.csproj @@ -0,0 +1,105 @@ + + + + Debug + AnyCPU + 9.0.30729 + 2.0 + {B42DBBF9-0A1F-4749-9787-013BF8D8F435} + Library + MongoDB.GridFS + v3.5 + MongoDB.GridFS + true + ..\..\StrongName.snk + + + + + 3.5 + publish\ + true + Disk + false + Foreground + 7 + Days + false + false + true + 0 + 1.0.0.%2a + false + false + true + + + true + full + false + bin\Debug + DEBUG + prompt + 4 + false + false + bin\Debug\MongoDB.GridFS.xml + AllRules.ruleset + 0618 + + + none + false + bin\Release\ + prompt + 4 + false + false + bin\Release\MongoDB.GridFS.xml + AllRules.ruleset + true + + + + + + + + AssemblyInfoGlobal.cs + + + + + + + + + + False + .NET Framework 3.5 SP1 Client Profile + false + + + False + .NET Framework 3.5 SP1 + true + + + False + Windows Installer 3.1 + true + + + + + StrongName.snk + + + + + {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} + MongoDB + + + + \ No newline at end of file diff --git a/source/MongoDB.Tests/App.config b/source/MongoDB.Tests/App.config new file mode 100644 index 00000000..79807133 --- /dev/null +++ b/source/MongoDB.Tests/App.config @@ -0,0 +1,37 @@ + + + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/Connections/TestConnection.cs b/source/MongoDB.Tests/IntegrationTests/Connections/TestConnection.cs new file mode 100644 index 00000000..feb82c82 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/Connections/TestConnection.cs @@ -0,0 +1,72 @@ +using System.IO; +using System.Text; +using MongoDB.Bson; +using MongoDB.Connections; +using MongoDB.Protocol; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests.Connections +{ + [TestFixture] + public class TestConnection + { + private void WriteBadMessage(Connection conn) + { + //Write a bad message to the socket to force mongo to shut down our connection. + var writer = new BinaryWriter(conn.GetStream()); + var encoding = new UTF8Encoding(); + var msg = encoding.GetBytes("Goodbye MongoDB!"); + writer.Write(16 + msg.Length + 1); + writer.Write(1); + writer.Write(1); + writer.Write(1001); + writer.Write(msg); + writer.Write((byte)0); + } + + private QueryMessage GenerateQueryMessage() + { + var qdoc = new Document {{"listDatabases", 1.0}}; + //QueryMessage qmsg = new QueryMessage(qdoc,"system.namespaces"); + return new QueryMessage(new BsonWriterSettings(), qdoc, "admin.$cmd") + { + NumberToReturn = -1 + }; + } + + [Test] + public void TestReconnectOnce() + { + var conn = ConnectionFactoryFactory.GetConnection(string.Empty); + conn.Open(); + + WriteBadMessage(conn); + try + { + var qmsg = GenerateQueryMessage(); + conn.SendTwoWayMessage(qmsg,string.Empty); + } + catch(IOException) + { + //Should be able to resend. + Assert.IsTrue(conn.IsConnected); + var qmsg = GenerateQueryMessage(); + var rmsg = conn.SendTwoWayMessage(qmsg, string.Empty); + Assert.IsNotNull(rmsg); + } + } + + [Test] + public void TestSendQueryMessage() + { + //Connection conn = new Connection("10.141.153.2"); + var conn = ConnectionFactoryFactory.GetConnection(string.Empty); + conn.Open(); + + var qmsg = GenerateQueryMessage(); + conn.SendTwoWayMessage(qmsg, string.Empty); + + conn.Close(); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/Connections/TestConnectionFactory.cs b/source/MongoDB.Tests/IntegrationTests/Connections/TestConnectionFactory.cs new file mode 100644 index 00000000..01468aea --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/Connections/TestConnectionFactory.cs @@ -0,0 +1,44 @@ +using System; +using MongoDB.Connections; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests.Connections +{ + [TestFixture] + public class TestConnectionFactory + { + [TearDown] + public void TearDown (){ + ConnectionFactoryFactory.Shutdown (); + } + + [Test] + public void TestGetConnection (){ + var connection1 = ConnectionFactoryFactory.GetConnection (string.Empty); + var connection2 = ConnectionFactoryFactory.GetConnection (string.Empty); + Assert.IsNotNull (connection1); + Assert.IsNotNull (connection2); + Assert.AreEqual (1, ConnectionFactoryFactory.PoolCount); + } + + [Test] + public void TestCreatePoolForEachUniqeConnectionString (){ + ConnectionFactoryFactory.GetConnection (string.Empty); + ConnectionFactoryFactory.GetConnection (string.Empty); + ConnectionFactoryFactory.GetConnection ("Username=test"); + ConnectionFactoryFactory.GetConnection ("Username=test"); + ConnectionFactoryFactory.GetConnection ("Server=localhost"); + Assert.AreEqual (3, ConnectionFactoryFactory.PoolCount); + } + + [Test] + public void 
TestExceptionWhenMinimumPoolSizeIsGreaterThenMaximumPoolSize (){ + try{ + ConnectionFactoryFactory.GetConnection("MinimumPoolSize=50; MaximumPoolSize=10"); + }catch(ArgumentException){ + }catch(Exception){ + Assert.Fail("Wrong exception thrown"); + } + } + } +} diff --git a/MongoDB.Net-Tests/Connections/TestPooledConnectionFactory.cs b/source/MongoDB.Tests/IntegrationTests/Connections/TestPooledConnectionFactory.cs similarity index 80% rename from MongoDB.Net-Tests/Connections/TestPooledConnectionFactory.cs rename to source/MongoDB.Tests/IntegrationTests/Connections/TestPooledConnectionFactory.cs index 0991bea3..442fa761 100644 --- a/MongoDB.Net-Tests/Connections/TestPooledConnectionFactory.cs +++ b/source/MongoDB.Tests/IntegrationTests/Connections/TestPooledConnectionFactory.cs @@ -1,8 +1,9 @@ using System; using System.Threading; +using MongoDB.Connections; using NUnit.Framework; -namespace MongoDB.Driver.Connections +namespace MongoDB.IntegrationTests.Connections { [TestFixture] public class TestPooledConnectionFactory @@ -10,7 +11,7 @@ public class TestPooledConnectionFactory [TestFixtureSetUp] public void SetUp() { - ConnectionFactory.Shutdown(); + ConnectionFactoryFactory.Shutdown(); } [Test] @@ -87,28 +88,6 @@ public void TestExceptionIfMaximumPoolSizeAndConnectionTimeoutAreReached() } } - [Test] - public void TestServerCirculationWorks() - { - var builder = new MongoConnectionStringBuilder(); - builder.AddServer("localhost"); - builder.AddServer("localhost", 27018); - builder.AddServer("localhost", 27019); - using(var pool = new PooledConnectionFactory(builder.ToString())) - { - var connection1 = pool.Open(); - var connection2 = pool.Open(); - var connection3 = pool.Open(); - var connection4 = pool.Open(); - var connection5 = pool.Open(); - Assert.AreEqual(27017, connection1.EndPoint.Port); - Assert.AreEqual(27018, connection2.EndPoint.Port); - Assert.AreEqual(27019, connection3.EndPoint.Port); - Assert.AreEqual(27017, connection4.EndPoint.Port); - Assert.AreEqual(27018, connection5.EndPoint.Port); - } - } - [Test] public void TestBorrowOneConnection() { @@ -159,7 +138,7 @@ public void TestDisconnectedConnectionsArentReturndToPool() } [Test] - public void TestIfConnectionLifetimeIsReachedItDosenotReturndToPool() + public void TestIfConnectionLifetimeIsReachedItDoesNotReturndToPool() { var builder = new MongoConnectionStringBuilder { diff --git a/MongoDB.Net-Tests/Connections/TestSimpleConnectionFactory.cs b/source/MongoDB.Tests/IntegrationTests/Connections/TestSimpleConnectionFactory.cs similarity index 88% rename from MongoDB.Net-Tests/Connections/TestSimpleConnectionFactory.cs rename to source/MongoDB.Tests/IntegrationTests/Connections/TestSimpleConnectionFactory.cs index 78593ded..47c1ff9b 100644 --- a/MongoDB.Net-Tests/Connections/TestSimpleConnectionFactory.cs +++ b/source/MongoDB.Tests/IntegrationTests/Connections/TestSimpleConnectionFactory.cs @@ -1,6 +1,7 @@ +using MongoDB.Connections; using NUnit.Framework; -namespace MongoDB.Driver.Connections +namespace MongoDB.IntegrationTests.Connections { [TestFixture] public class TestSimpleConnectionFactory diff --git a/source/MongoDB.Tests/IntegrationTests/Inheritance/TestInheritanceWithAbstractBaseClass.cs b/source/MongoDB.Tests/IntegrationTests/Inheritance/TestInheritanceWithAbstractBaseClass.cs new file mode 100644 index 00000000..c3fd8b18 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/Inheritance/TestInheritanceWithAbstractBaseClass.cs @@ -0,0 +1,280 @@ +using System; +using System.Collections.Generic; +using 
System.Linq; +using System.Text; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests.Inheritance +{ + [TestFixture] + public class TestInheritanceWithAbstractBaseClass : MongoTestBase + { + abstract class Animal + { + public Oid Id { get; set; } + + public int Age { get; set; } + + public string Name { get; set; } + } + + class Bear : Animal + { } + + abstract class Cat : Animal + { } + + class Tiger : Cat + { } + + class Lion : Cat + { } + + public override string TestCollections + { + get { return "Animal"; } + } + + [SetUp] + public void TestSetup() + { + CleanDB(); + } + + protected override Configuration.MongoConfigurationBuilder GetConfiguration() + { + var builder = base.GetConfiguration(); + builder.Mapping(mapping => + { + mapping.DefaultProfile(profile => + { + profile.SubClassesAre(x => x.IsSubclassOf(typeof(Animal))); + }); + + mapping.Map(); + mapping.Map(); + mapping.Map(); + mapping.Map(); + }); + + return builder; + } + + [Test] + public void Should_persist_discriminator_using_base_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var docCollection = DB.GetCollection("Animal"); + + var docs = docCollection.FindAll().Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(new[] { "Cat", "Tiger" }, (List)docs[0]["_t"]); + Assert.AreEqual("Bear", (string)docs[1]["_t"]); + } + + [Test] + public void Should_persist_discriminator_using_concrete_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Lion() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var docCollection = DB.GetCollection("Animal"); + + var docs = docCollection.FindAll().Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(new[] { "Cat", "Tiger" }, (List)docs[0]["_t"]); + Assert.AreEqual(new[] { "Cat", "Lion" }, (List)docs[1]["_t"]); + } + + [Test] + public void Should_fetch_with_base_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var animals = animalCollection.FindAll().Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(2, animals.Count); + Assert.IsInstanceOfType(typeof(Tiger), animals[0]); + Assert.AreEqual(19, animals[0].Age); + Assert.AreEqual("Bob", animals[0].Name); + Assert.IsInstanceOfType(typeof(Bear), animals[1]); + Assert.AreEqual(20, animals[1].Age); + Assert.AreEqual("Jim", animals[1].Name); + } + + [Test] + public void Should_fetch_with_base_class_collection_through_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var animals = (from a in animalCollection.Linq() + orderby a.Age ascending + select a).ToList(); + + Assert.AreEqual(2, animals.Count); + Assert.IsInstanceOfType(typeof(Tiger), animals[0]); + Assert.AreEqual(19, animals[0].Age); + Assert.AreEqual("Bob", animals[0].Name); + Assert.IsInstanceOfType(typeof(Bear), animals[1]); + Assert.AreEqual(20, animals[1].Age); + Assert.AreEqual("Jim", animals[1].Name); + } + + [Test] + public void Should_support_projections_with_base_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = 
"Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var animals = animalCollection.FindAll().Fields(new { Age = true }).Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(2, animals.Count); + Assert.IsInstanceOfType(typeof(Tiger), animals[0]); + Assert.AreEqual(19, animals[0].Age); + Assert.IsNull(animals[0].Name); + Assert.IsInstanceOfType(typeof(Bear), animals[1]); + Assert.AreEqual(20, animals[1].Age); + Assert.IsNull(animals[1].Name); + } + + [Test] + public void Should_support_projections_with_base_class_collections_with_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var animals = (from a in animalCollection.Linq() + orderby a.Age ascending + select new { a.Name, a.Age }).ToList(); + + Assert.AreEqual(2, animals.Count); + Assert.AreEqual(19, animals[0].Age); + Assert.AreEqual("Bob", animals[0].Name); + Assert.AreEqual(20, animals[1].Age); + Assert.AreEqual("Jim", animals[1].Name); + } + + [Test] + public void Should_support_projections_with_concrete_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var catCollection = DB.GetCollection(); + + var cats = catCollection.FindAll().Fields(new { Age = true }).Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(1, cats.Count); + Assert.IsInstanceOfType(typeof(Tiger), cats[0]); + Assert.AreEqual(19, cats[0].Age); + Assert.IsNull(cats[0].Name); + } + + [Test] + public void Should_support_projections_with_concrete_class_collections_with_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var catCollection = DB.GetCollection(); + + var cats = (from a in catCollection.Linq() + orderby a.Age ascending + select new { a.Name, a.Age }).ToList(); + + Assert.AreEqual(1, cats.Count); + Assert.AreEqual(19, cats[0].Age); + Assert.AreEqual("Bob", cats[0].Name); + } + + [Test] + public void Should_fetch_with_concrete_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var catCollection = DB.GetCollection(); + + var cats = catCollection.FindAll().Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(1, cats.Count); + Assert.IsInstanceOfType(typeof(Tiger), cats[0]); + Assert.AreEqual(19, cats[0].Age); + } + + [Test] + public void Should_fetch_with_concrete_class_collection_through_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var catCollection = DB.GetCollection(); + + var animals = (from a in catCollection.Linq() + orderby a.Age ascending + select a).ToList(); + + Assert.AreEqual(1, animals.Count); + Assert.IsInstanceOfType(typeof(Tiger), animals[0]); + Assert.AreEqual(19, animals[0].Age); + } + + [Test] + public void Should_get_correct_count_with_base_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + 
Assert.AreEqual(2, animalCollection.Count()); + } + + [Test] + public void Should_get_correct_count_with_base_class_collection_using_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + Assert.AreEqual(2, animalCollection.Linq().Count()); + } + + [Test] + public void Should_get_correct_count_with_concrete_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20 }); + animalCollection.Save(new Tiger() { Age = 19 }); + + var catCollection = DB.GetCollection(); + + Assert.AreEqual(1, catCollection.Count()); + } + + [Test] + public void Should_get_correct_count_with_concrete_class_collection_using_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Bear() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var catCollection = DB.GetCollection(); + + Assert.AreEqual(1, catCollection.Linq().Count()); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/Inheritance/TestInheritanceWithConcreteBaseClass.cs b/source/MongoDB.Tests/IntegrationTests/Inheritance/TestInheritanceWithConcreteBaseClass.cs new file mode 100644 index 00000000..953497df --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/Inheritance/TestInheritanceWithConcreteBaseClass.cs @@ -0,0 +1,276 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests.Inheritance +{ + [TestFixture] + public class TestInheritanceWithConcreteBaseClass : MongoTestBase + { + class Animal + { + public Oid Id { get; set; } + + public int Age { get; set; } + + public string Name { get; set; } + } + + class Bear : Animal + { } + + abstract class Cat : Animal + { } + + class Tiger : Cat + { } + + class Lion : Cat + { } + + public override string TestCollections + { + get { return "Animal"; } + } + + [SetUp] + public void TestSetup() + { + CleanDB(); + } + + protected override Configuration.MongoConfigurationBuilder GetConfiguration() + { + var builder = base.GetConfiguration(); + builder.Mapping(mapping => + { + mapping.DefaultProfile(profile => + { + profile.SubClassesAre(x => x.IsSubclassOf(typeof(Animal))); + }); + + mapping.Map(); + mapping.Map(); + mapping.Map(); + mapping.Map(); + }); + + return builder; + } + + [Test] + public void Should_persist_discriminator_using_base_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20 }); + animalCollection.Save(new Tiger() { Age = 19 }); + + var docCollection = DB.GetCollection("Animal"); + + var docs = docCollection.FindAll().Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(new[] { "Cat", "Tiger" }, (List)docs[0]["_t"]); + Assert.IsNull(docs[1]["_t"]); + } + + [Test] + public void Should_persist_discriminator_using_inherited_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Lion() { Age = 20 }); + animalCollection.Save(new Tiger() { Age = 19 }); + + var docCollection = DB.GetCollection("Animal"); + + var docs = docCollection.FindAll().Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(new[] { "Cat", "Tiger" }, (List)docs[0]["_t"]); + Assert.AreEqual(new[] { "Cat", "Lion" }, (List)docs[1]["_t"]); + } + + [Test] + public void 
Should_fetch_with_base_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20 }); + animalCollection.Save(new Tiger() { Age = 19 }); + + var animals = animalCollection.FindAll().Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(2, animals.Count); + Assert.IsInstanceOfType(typeof(Tiger), animals[0]); + Assert.AreEqual(19, animals[0].Age); + Assert.IsInstanceOfType(typeof(Animal), animals[1]); + Assert.AreEqual(20, animals[1].Age); + } + + [Test] + public void Should_fetch_with_base_class_collection_through_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20 }); + animalCollection.Save(new Tiger() { Age = 19 }); + + var animals = (from a in animalCollection.Linq() + orderby a.Age ascending + select a).ToList(); + + Assert.AreEqual(2, animals.Count); + Assert.IsInstanceOfType(typeof(Tiger), animals[0]); + Assert.AreEqual(19, animals[0].Age); + Assert.IsInstanceOfType(typeof(Animal), animals[1]); + Assert.AreEqual(20, animals[1].Age); + } + + [Test] + public void Should_fetch_with_inherited_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20 }); + animalCollection.Save(new Tiger() { Age = 19 }); + + var catCollection = DB.GetCollection(); + + var cats = catCollection.FindAll().Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(1, cats.Count); + Assert.IsInstanceOfType(typeof(Tiger), cats[0]); + Assert.AreEqual(19, cats[0].Age); + } + + [Test] + public void Should_fetch_with_inherited_class_collection_through_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20 }); + animalCollection.Save(new Tiger() { Age = 19 }); + + var catCollection = DB.GetCollection(); + + var animals = (from a in catCollection.Linq() + orderby a.Age ascending + select a).ToList(); + + Assert.AreEqual(1, animals.Count); + Assert.IsInstanceOfType(typeof(Tiger), animals[0]); + Assert.AreEqual(19, animals[0].Age); + } + + [Test] + public void Should_support_projections_with_base_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var animals = animalCollection.FindAll().Fields(new { Age = true }).Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(2, animals.Count); + Assert.IsInstanceOfType(typeof(Tiger), animals[0]); + Assert.AreEqual(19, animals[0].Age); + Assert.IsNull(animals[0].Name); + Assert.IsInstanceOfType(typeof(Animal), animals[1]); + Assert.AreEqual(20, animals[1].Age); + Assert.IsNull(animals[1].Name); + } + + [Test] + public void Should_support_projections_with_base_class_collections_with_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var animals = (from a in animalCollection.Linq() + orderby a.Age ascending + select new { a.Name, a.Age }).ToList(); + + Assert.AreEqual(2, animals.Count); + Assert.AreEqual(19, animals[0].Age); + Assert.AreEqual("Bob", animals[0].Name); + Assert.AreEqual(20, animals[1].Age); + Assert.AreEqual("Jim", animals[1].Name); + } + + [Test] + public void Should_support_projections_with_inherited_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 
20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var catCollection = DB.GetCollection(); + + var cats = catCollection.FindAll().Fields(new { Age = true }).Sort("Age", IndexOrder.Ascending).Documents.ToList(); + + Assert.AreEqual(1, cats.Count); + Assert.IsInstanceOfType(typeof(Tiger), cats[0]); + Assert.AreEqual(19, cats[0].Age); + Assert.IsNull(cats[0].Name); + } + + [Test] + public void Should_support_projections_with_inherited_class_collections_with_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20, Name = "Jim" }); + animalCollection.Save(new Tiger() { Age = 19, Name = "Bob" }); + + var catCollection = DB.GetCollection(); + + var cats = (from a in catCollection.Linq() + orderby a.Age ascending + select new { a.Name, a.Age }).ToList(); + + Assert.AreEqual(1, cats.Count); + Assert.AreEqual(19, cats[0].Age); + Assert.AreEqual("Bob", cats[0].Name); + } + + [Test] + public void Should_get_correct_count_with_base_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20 }); + animalCollection.Save(new Tiger() { Age = 19 }); + + Assert.AreEqual(2, animalCollection.Count()); + } + + [Test] + public void Should_get_correct_count_with_base_class_collection_using_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20 }); + animalCollection.Save(new Tiger() { Age = 19 }); + + Assert.AreEqual(2, animalCollection.Linq().Count()); + } + + [Test] + public void Should_get_correct_count_with_inherited_class_collection() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20 }); + animalCollection.Save(new Tiger() { Age = 19 }); + + var catCollection = DB.GetCollection(); + + Assert.AreEqual(1, catCollection.Count()); + } + + [Test] + public void Should_get_correct_count_with_inherited_class_collection_using_linq() + { + var animalCollection = DB.GetCollection(); + animalCollection.Save(new Animal() { Age = 20 }); + animalCollection.Save(new Tiger() { Age = 19 }); + + var catCollection = DB.GetCollection(); + + Assert.AreEqual(1, catCollection.Linq().Count()); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/Linq/LinqDomain.cs b/source/MongoDB.Tests/IntegrationTests/Linq/LinqDomain.cs new file mode 100644 index 00000000..cfa370df --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/Linq/LinqDomain.cs @@ -0,0 +1,60 @@ +using System; +using System.Collections.Generic; + +using MongoDB.Attributes; + +namespace MongoDB.IntegrationTests.Linq +{ + public class Person + { + [MongoAlias("fn")] + public string FirstName { get; set; } + + [MongoAlias("ln")] + public string LastName { get; set; } + + [MongoAlias("age")] + public int Age { get; set; } + + [MongoAlias("add")] + public Address PrimaryAddress { get; set; } + + [MongoAlias("otherAdds")] + public List
Addresses { get; set; } + + [MongoAlias("emps")] + public int[] EmployerIds { get; set; } + + public string MidName { get; set; } + + public Oid LinkedId { get; set; } + } + + public class Address + { + [MongoAlias("city")] + public string City { get; set; } + + public bool IsInternational { get; set; } + + public AddressType AddressType { get; set; } + } + + public enum AddressType + { + Company, + Private + } + + public class PersonWrapper + { + public Person Person { get; set; } + public string Name { get; set; } + + public PersonWrapper(Person person, string name) + { + Person = person; + Name = name; + } + } +} diff --git a/source/MongoDB.Tests/IntegrationTests/Linq/LinqExtensionsTests.cs b/source/MongoDB.Tests/IntegrationTests/Linq/LinqExtensionsTests.cs new file mode 100644 index 00000000..22451e62 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/Linq/LinqExtensionsTests.cs @@ -0,0 +1,83 @@ +using System.Linq; +using MongoDB.Linq; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests.Linq +{ + [TestFixture] + public class LinqExtensionsTests : MongoTestBase + { + private class Person + { + public string FirstName { get; set; } + + public string LastName { get; set; } + + public int Age { get; set; } + + public Address Address { get; set; } + + public string[] Aliases { get; set; } + } + + private class Address + { + public string City { get; set; } + } + + private class Organization + { + public string Name { get; set; } + + public Address Address { get; set; } + } + + private IMongoCollection personCollection; + private IMongoCollection orgCollection; + + public override string TestCollections + { + get { return "people"; } + } + + [SetUp] + public void TestSetup() + { + personCollection = this.DB.GetCollection("people"); + personCollection.Delete(new { }, true); + personCollection.Insert(new Person { FirstName = "Bob", LastName = "McBob", Age = 42, Address = new Address { City = "London" }, Aliases = new[]{"Blub"} }, true); + personCollection.Insert(new Person { FirstName = "Jane", LastName = "McJane", Age = 35, Address = new Address { City = "Paris" } }, true); + personCollection.Insert(new Person { FirstName = "Joe", LastName = "McJoe", Age = 21, Address = new Address { City = "Chicago" } }, true); + + orgCollection = this.DB.GetCollection("orgs"); + orgCollection.Delete(new { }, true); + orgCollection.Insert(new Organization { Name = "The Muffler Shanty", Address = new Address { City = "London" } }, true); + } + + [Test] + public void Delete() + { + personCollection.Delete(p => true); + + Assert.AreEqual(0, personCollection.Count()); + } + + [Test] + public void Find() + { + var people = personCollection.Find(x => x.Age > 21).Documents; + + Assert.AreEqual(2, people.Count()); + } + + [Test] + public void FindOne_WithAny() + { + var person = personCollection.FindOne(e => e.Aliases.Any(a=>a=="Blub")); + + Assert.IsNotNull(person); + Assert.AreEqual("Bob",person.FirstName); + } + + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/Linq/LinqTestsBase.cs b/source/MongoDB.Tests/IntegrationTests/Linq/LinqTestsBase.cs new file mode 100644 index 00000000..bd376037 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/Linq/LinqTestsBase.cs @@ -0,0 +1,22 @@ +using NUnit.Framework; + +namespace MongoDB.IntegrationTests.Linq +{ + public class LinqTestsBase : MongoTestBase + { + public override string TestCollections + { + get { return "people"; } + } + + protected IMongoCollection Collection; + protected IMongoCollection 
DocumentCollection; + + [SetUp] + public virtual void TestSetup() + { + Collection = DB.GetCollection("people"); + DocumentCollection = DB.GetCollection("people"); + } + } +} diff --git a/source/MongoDB.Tests/IntegrationTests/Linq/MapReduceTests.cs b/source/MongoDB.Tests/IntegrationTests/Linq/MapReduceTests.cs new file mode 100644 index 00000000..f27cd317 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/Linq/MapReduceTests.cs @@ -0,0 +1,175 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Text.RegularExpressions; + +using MongoDB.Linq; + +using NUnit.Framework; + +namespace MongoDB.IntegrationTests.Linq +{ + [TestFixture] + public class MapReduceTests : LinqTestsBase + { + public override void TestSetup() + { + base.TestSetup(); + + Collection.Delete(new { }, true); + Collection.Insert( + new Person + { + FirstName = "Bob", + LastName = "McBob", + Age = 42, + PrimaryAddress = new Address { City = "London" }, + Addresses = new List
+ { + new Address { City = "London" }, + new Address { City = "Tokyo" }, + new Address { City = "Seattle" } + }, + EmployerIds = new[] { 1, 2 } + }, true); + + Collection.Insert( + new Person + { + FirstName = "Jane", + LastName = "McJane", + Age = 35, + PrimaryAddress = new Address { City = "Paris" }, + Addresses = new List
+ { + new Address { City = "Paris" } + }, + EmployerIds = new[] { 1 } + + }, true); + + Collection.Insert( + new Person + { + FirstName = "Joe", + LastName = "McJoe", + Age = 21, + PrimaryAddress = new Address { City = "Chicago" }, + Addresses = new List
+ { + new Address { City = "Chicago" }, + new Address { City = "London" } + }, + EmployerIds = new[] { 3 } + }, true); + } + + [Test] + public void Off_of_select() + { + var minAge = Collection.Linq().Select(x => x.Age).Min(); + + Assert.AreEqual(21, minAge); + } + + [Test] + public void Off_of_root() + { + var minAge = Collection.Linq().Min(x => x.Age); + + Assert.AreEqual(21, minAge); + } + + [Test] + public void NoGrouping() + { + var grouping = Enumerable.ToList(from p in Collection.Linq() + where p.Age > 21 + group p by 1 into g + select new + { + Average = g.Average(x => x.Age), + Min = g.Min(x => x.Age), + Max = g.Max(x => x.Age), + Count = g.Count(), + Sum = g.Sum(x => x.Age) + }); + + Assert.AreEqual(1, grouping.Count); + Assert.AreEqual(38.5, grouping.Single().Average); + Assert.AreEqual(35, grouping.Single().Min); + Assert.AreEqual(42, grouping.Single().Max); + Assert.AreEqual(2, grouping.Single().Count); + Assert.AreEqual(77, grouping.Single().Sum); + } + + [Test] + public void Expression_Grouping() + { + var grouping = Enumerable.ToList(from p in Collection.Linq() + group p by p.Age % 2 into g + select new + { + IsEven = g.Key == 0, + Min = g.Min(x => x.Age), + Max = g.Max(x => x.Age), + Count = g.Count(), + Sum = g.Sum(x => x.Age) + }); + + Assert.AreEqual(2, grouping.Count); + Assert.AreEqual(1, grouping[0].Count); + Assert.AreEqual(42, grouping[0].Max); + Assert.AreEqual(42, grouping[0].Min); + Assert.AreEqual(42, grouping[0].Sum); + Assert.AreEqual(2, grouping[1].Count); + Assert.AreEqual(35, grouping[1].Max); + Assert.AreEqual(21, grouping[1].Min); + Assert.AreEqual(56, grouping[1].Sum); + } + + [Test] + public void Expression_Grouping2() + { + var grouping = Enumerable.ToList(from p in Collection.Linq() + group p by p.FirstName[0] into g + select new + { + FirstLetter = g.Key, + Min = g.Min(x => x.Age), + Max = g.Max(x => x.Age) + }); + + Assert.AreEqual(2, grouping.Count); + Assert.AreEqual('B', grouping[0].FirstLetter); + Assert.AreEqual(42, grouping[0].Max); + Assert.AreEqual(42, grouping[0].Min); + Assert.AreEqual('J', grouping[1].FirstLetter); + Assert.AreEqual(35, grouping[1].Max); + Assert.AreEqual(21, grouping[1].Min); + } + + [Test] + public void Complex() + { + var grouping = Enumerable.ToList(from p in Collection.Linq() + where p.Age > 21 + group p by new { FirstName = p.FirstName, LastName = p.LastName } into g + select new + { + Name = g.Key.FirstName + " " + g.Key.LastName, + Min = g.Min(x => x.Age) + 100, + Max = g.Max(x => x.Age) + 100 + }); + + Assert.AreEqual(2, grouping.Count); + Assert.AreEqual("Bob McBob", grouping[0].Name); + Assert.AreEqual(142, grouping[0].Max); + Assert.AreEqual(142, grouping[0].Min); + Assert.AreEqual("Jane McJane", grouping[1].Name); + Assert.AreEqual(135, grouping[1].Max); + Assert.AreEqual(135, grouping[1].Min); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/Linq/MongoQueryProviderTests.cs b/source/MongoDB.Tests/IntegrationTests/Linq/MongoQueryProviderTests.cs new file mode 100644 index 00000000..8d10e377 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/Linq/MongoQueryProviderTests.cs @@ -0,0 +1,447 @@ +using System.Collections.Generic; +using System.Linq; +using System.Text.RegularExpressions; +using MongoDB.Linq; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests.Linq +{ + [TestFixture] + public class MongoQueryProviderTests : LinqTestsBase + { + [Test] + public void Boolean1() + { + var people = Collection.Linq().Where(x => x.PrimaryAddress.IsInternational); + 
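            // Nothing has been sent to the server at this point: the Where call above only captures
            // an expression tree. Casting to IMongoQueryable and requesting the query object (next
            // statement) exposes the translated Mongo query so the test can compare it against a
            // hand-built Document such as new Document("PrimaryAddress.IsInternational", true).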
+ var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(new Document("PrimaryAddress.IsInternational", true), queryObject.Query); + } + + [Test] + public void Boolean_Inverse() + { + var people = Collection.Linq().Where(x => !x.PrimaryAddress.IsInternational); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(new Document("$not", new Document("PrimaryAddress.IsInternational", true)), queryObject.Query); + } + + [Test] + public void Boolean_In_Conjunction() + { + var people = Collection.Linq().Where(x => x.PrimaryAddress.IsInternational && x.Age > 21); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(new Document("PrimaryAddress.IsInternational", true).Add("Age", Op.GreaterThan(21)), queryObject.Query); + } + + [Test] + public void Chained() + { + var people = Collection.Linq() + .Select(x => new {Name = x.FirstName + x.LastName, x.Age}) + .Where(x => x.Age > 21) + .Select(x => x.Name); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(2, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Age", Op.GreaterThan(21)), queryObject.Query); + } + + [Test] + public void ConjuctionConstraint() + { + var people = Collection.Linq().Where(p => p.Age > 21 && p.Age < 42); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Age", new Document().Merge(Op.GreaterThan(21)).Merge(Op.LessThan(42))), queryObject.Query); + } + + [Test] + public void ConstraintsAgainstLocalReferenceMember() + { + var local = new {Test = new {Age = 21}}; + var people = Collection.Linq().Where(p => p.Age > local.Test.Age); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Age", Op.GreaterThan(local.Test.Age)), queryObject.Query); + } + + [Test] + public void ConstraintsAgainstLocalVariable() + { + var age = 21; + var people = Collection.Linq().Where(p => p.Age > age); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Age", Op.GreaterThan(age)), queryObject.Query); + } + + [Test] + [Ignore("Something is interesting about document comparison that causes this to fail.")] + public void Disjunction() + { + var people = Collection.Linq().Where(x => x.Age == 21 || x.Age == 35); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("$where", new Code("((this.Age === 21) || (this.Age === 35))")), queryObject.Query); + } + + [Test] + public void DocumentQuery() + { + var people = from p in DocumentCollection.Linq() + where p.Key("Age") > 21 + select (string)p["FirstName"]; + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(new Document("FirstName", 1), queryObject.Fields); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Age", Op.GreaterThan(21)), queryObject.Query); + } + + [Test] + 
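        // Enum members are written into the translated query by their underlying integer value,
        // which is why the assertion in the test below expects (int)AddressType.Company rather
        // than the enum's name.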
public void Enum() + { + var people = Collection.Linq().Where(x => x.PrimaryAddress.AddressType == AddressType.Company); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("PrimaryAddress.AddressType", (int)AddressType.Company), queryObject.Query); + } + + [Test] + public void LocalEnumerable_Contains() + { + var names = new[] {"Jack", "Bob"}; + var people = Collection.Linq().Where(x => names.Contains(x.FirstName)); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("FirstName", Op.In("Jack", "Bob")), queryObject.Query); + } + + [Test] + public void LocalList_Contains() + { + var names = new List {"Jack", "Bob"}; + var people = Collection.Linq().Where(x => names.Contains(x.FirstName)); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("FirstName", Op.In("Jack", "Bob")), queryObject.Query); + } + + [Test] + public void NestedArray_Length() + { + var people = from p in Collection.Linq() + where p.EmployerIds.Length == 1 + select p; + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("EmployerIds", Op.Size(1)), queryObject.Query); + } + + [Test] + public void NestedArray_indexer() + { + var people = Collection.Linq().Where(x => x.EmployerIds[0] == 1); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("EmployerIds.0", 1), queryObject.Query); + } + + [Test] + public void NestedClassConstraint() + { + var people = Collection.Linq().Where(p => p.PrimaryAddress.City == "my city"); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("PrimaryAddress.City", "my city"), queryObject.Query); + } + + [Test] + public void NestedCollection_Count() + { + var people = from p in Collection.Linq() + where p.Addresses.Count == 1 + select p; + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Addresses", Op.Size(1)), queryObject.Query); + } + + [Test] + public void NestedList_indexer() + { + var people = Collection.Linq().Where(x => x.Addresses[1].City == "Tokyo"); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Addresses.1.City", "Tokyo"), queryObject.Query); + } + + [Test] + public void NestedQueryable_Any() + { + var people = Collection.Linq().Where(x 
=> x.Addresses.Any(a => a.City == "London")); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Addresses", new Document("$elemMatch", new Document("City", "London"))), queryObject.Query); + } + + [Test] + public void NestedQueryable_Contains() + { + var people = Collection.Linq().Where(x => x.EmployerIds.Contains(1)); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("EmployerIds", 1), queryObject.Query); + } + + [Test] + public void Nested_Queryable_Count() + { + var people = Collection.Linq().Where(x => x.Addresses.Count() == 1); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Addresses", Op.Size(1)), queryObject.Query); + } + + [Test] + public void Nested_Queryable_ElementAt() + { + var people = Collection.Linq().Where(x => x.Addresses.ElementAt(1).City == "Tokyo"); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Addresses.1.City", "Tokyo"), queryObject.Query); + } + + [Test] + public void NotNullCheck() + { + var people = Collection.Linq().Where(x => x.MidName != null); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("MidName", Op.NotEqual(null)), queryObject.Query); + } + + [Test] + public void NullCheck() + { + var people = Collection.Linq().Where(x => x.MidName == null); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("MidName", null), queryObject.Query); + } + + [Test] + public void OrderBy() + { + var people = Collection.Linq().OrderBy(x => x.Age).ThenByDescending(x => x.LastName); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Age", 1).Add("LastName", -1), queryObject.Sort); + } + + [Test] + public void Projection() + { + var people = from p in Collection.Linq() + select new {Name = p.FirstName + p.LastName}; + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(2, queryObject.Fields.Count()); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(0, queryObject.Query.Count); + } + + [Test] + public void ProjectionWithConstraints() + { + var people = from p in Collection.Linq() + where p.Age > 21 && p.Age < 42 + select new {Name = p.FirstName + p.LastName}; + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(2, queryObject.Fields.Count()); + Assert.AreEqual(0, 
queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("Age", new Document().Merge(Op.GreaterThan(21)).Merge(Op.LessThan(42))), queryObject.Query); + } + + [Test] + public void ProjectionWithLocalCreation_ChildobjectShouldNotBeNull() + { + var people = Collection.Linq() + .Select(p => new PersonWrapper(p, p.FirstName)); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count()); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(0, queryObject.Query.Count); + + } + + [Test] + public void Regex_IsMatch() + { + var people = from p in Collection.Linq() + where Regex.IsMatch(p.FirstName, "Joe") + select p; + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("FirstName", new MongoRegex("Joe")), queryObject.Query); + } + + [Test] + public void Regex_IsMatch_CaseInsensitive() + { + var people = from p in Collection.Linq() + where Regex.IsMatch(p.FirstName, "Joe", RegexOptions.IgnoreCase) + select p; + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("FirstName", new MongoRegex("Joe", MongoRegexOption.IgnoreCase)), queryObject.Query); + } + + [Test] + public void SingleEqualConstraint() + { + var people = Collection.Linq().Where(p => "Jack" == p.FirstName); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("FirstName", "Jack"), queryObject.Query); + } + + [Test] + public void SkipAndTake() + { + var people = Collection.Linq().Skip(2).Take(1); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(1, queryObject.NumberToLimit); + Assert.AreEqual(2, queryObject.NumberToSkip); + } + + [Test] + public void String_Contains() + { + var people = from p in Collection.Linq() + where p.FirstName.Contains("o") + select p; + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("FirstName", new MongoRegex("o")), queryObject.Query); + } + + [Test] + public void String_EndsWith() + { + var people = from p in Collection.Linq() + where p.FirstName.EndsWith("e") + select p; + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("FirstName", new MongoRegex("e$")), queryObject.Query); + } + + [Test] + public void String_StartsWith() + { + var people = from p in Collection.Linq() + where p.FirstName.StartsWith("J") + select p; + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + Assert.AreEqual(0, queryObject.Fields.Count); + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(new Document("FirstName", new MongoRegex("^J")), queryObject.Query); + } + + [Test] + 
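        // The three string tests above all translate to MongoRegex queries: Contains("o") expects
        // the pattern "o", EndsWith("e") expects "e$" and StartsWith("J") expects "^J".
        // WithoutConstraints below covers the degenerate case: an empty query document and no
        // skip or limit applied.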
public void WithoutConstraints() + { + var people = Collection.Linq(); + + var queryObject = ((IMongoQueryable)people).GetQueryObject(); + + Assert.AreEqual(0, queryObject.NumberToLimit); + Assert.AreEqual(0, queryObject.NumberToSkip); + Assert.AreEqual(0, queryObject.Query.Count); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/Linq/MongoQueryTests.cs b/source/MongoDB.Tests/IntegrationTests/Linq/MongoQueryTests.cs new file mode 100644 index 00000000..94ae8908 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/Linq/MongoQueryTests.cs @@ -0,0 +1,445 @@ +using System.Collections.Generic; +using System.Linq; +using System.Text.RegularExpressions; +using MongoDB.Linq; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests.Linq +{ + [TestFixture] + public class MongoQueryTests : LinqTestsBase + { + public override void TestSetup() + { + base.TestSetup(); + + Collection.Delete(new {}, true); + Collection.Insert( + new Person + { + FirstName = "Bob", + MidName = "Bart", + LastName = "McBob", + Age = 42, + PrimaryAddress = new Address {City = "London", IsInternational = true, AddressType = AddressType.Company}, + Addresses = new List
+ { + new Address { City = "London", IsInternational = true, AddressType = AddressType.Company }, + new Address { City = "Tokyo", IsInternational = true, AddressType = AddressType.Private }, + new Address { City = "Seattle", IsInternational = false, AddressType = AddressType.Private } + }, + EmployerIds = new[] { 1, 2 } + }, true); + + Collection.Insert( + new Person + { + FirstName = "Jane", + LastName = "McJane", + Age = 35, + PrimaryAddress = new Address { City = "Paris", IsInternational = false, AddressType = AddressType.Private }, + Addresses = new List
+ { + new Address { City = "Paris", AddressType = AddressType.Private } + }, + EmployerIds = new[] {1} + }, + true); + + Collection.Insert( + new Person + { + FirstName = "Joe", + LastName = "McJoe", + Age = 21, + PrimaryAddress = new Address { City = "Chicago", IsInternational = true, AddressType = AddressType.Private }, + Addresses = new List
+ { + new Address { City = "Chicago", AddressType = AddressType.Private }, + new Address { City = "London", AddressType = AddressType.Company } + }, + EmployerIds = new[] {3} + }, + true); + } + + [Test] + public void Any() + { + var anyone = Collection.Linq().Any(x => x.Age <= 21); + + Assert.IsTrue(anyone); + } + + [Test] + public void Boolean() + { + var people = Enumerable.ToList(Collection.Linq().Where(x => x.PrimaryAddress.IsInternational)); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void Boolean_Inverse() + { + var people = Enumerable.ToList(Collection.Linq().Where(x => !x.PrimaryAddress.IsInternational)); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void Boolean_In_Conjunction() + { + var people = Enumerable.ToList(Collection.Linq().Where(x => x.PrimaryAddress.IsInternational && x.Age > 21)); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void Chained() + { + var people = Collection.Linq() + .Select(x => new { Name = x.FirstName + x.LastName, x.Age }) + .Where(x => x.Age > 21) + .Select(x => x.Name).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void Complex_Addition() + { + var people = Collection.Linq().Where(x => x.Age + 23 < 50).ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void Complex_Disjunction() + { + var people = Collection.Linq().Where(x => x.Age == 21 || x.Age == 35).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void ConjuctionConstraint() + { + var people = Collection.Linq().Where(p => p.Age > 21 && p.Age < 42).ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void ConstraintsAgainstLocalReferenceMember() + { + var local = new { Test = new { Age = 21 } }; + var people = Collection.Linq().Where(p => p.Age > local.Test.Age).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void ConstraintsAgainstLocalVariable() + { + var age = 21; + var people = Collection.Linq().Where(p => p.Age > age).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void Count() + { + var count = Collection.Linq().Count(); + + Assert.AreEqual(3, count); + } + + [Test] + public void Count_with_predicate() + { + var count = Collection.Linq().Count(x => x.Age > 21); + + Assert.AreEqual(2, count); + } + + [Test] + public void Count_without_predicate() + { + var count = Collection.Linq().Where(x => x.Age > 21).Count(); + + Assert.AreEqual(2, count); + } + + [Test] + public void DocumentQuery() + { + var people = (from p in DocumentCollection.Linq() + where p.Key("age") > 21 + select (string)p["fn"]).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void Enum() + { + var people = Collection.Linq() + .Where(x => x.PrimaryAddress.AddressType == AddressType.Company) + .ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void First() + { + var person = Collection.Linq().OrderBy(x => x.Age).First(); + + Assert.AreEqual("Joe", person.FirstName); + } + + [Test] + public void LocalEnumerable_Contains() + { + var names = new[] { "Joe", "Bob" }; + var people = Collection.Linq().Where(x => names.Contains(x.FirstName)).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void LocalList_Contains() + { + var names = new List { "Joe", "Bob" }; + var people = Collection.Linq().Where(x => names.Contains(x.FirstName)).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void NestedArray_Length() + { + var people = (from p in Collection.Linq() + where 
p.EmployerIds.Length == 1 + select p).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test(Description = "This will fail < 1.4")] + public void NestedArray_indexer() + { + var people = Collection.Linq().Where(x => x.EmployerIds[0] == 1).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void NestedClassConstraint() + { + var people = Collection.Linq().Where(p => p.PrimaryAddress.City == "London").ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void NestedCollection_Count() + { + var people = (from p in Collection.Linq() + where p.Addresses.Count == 1 + select p).ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test(Description = "This will fail < 1.4")] + public void NestedList_indexer() + { + var people = Collection.Linq().Where(x => x.Addresses[1].City == "Tokyo").ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void NestedQueryable_Any() + { + var people = Collection.Linq().Where(x => x.Addresses.Any(a => a.City == "London")).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void NestedQueryable_Contains() + { + var people = Collection.Linq().Where(x => x.EmployerIds.Contains(1)).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void Nested_Queryable_Count() + { + var people = Collection.Linq().Where(x => x.Addresses.Count() == 1).ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test(Description = "This will fail < 1.4")] + public void Nested_Queryable_ElementAt() + { + var people = Collection.Linq().Where(x => x.Addresses.ElementAt(1).City == "Tokyo").ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void NotNullCheck() + { + var people = Collection.Linq().Where(x => x.MidName != null).ToArray(); + + Assert.AreEqual(1, people.Length); + } + + [Test] + public void NullCheck() + { + var people = Collection.Linq().Where(x => x.MidName == null).ToArray(); + + Assert.AreEqual(2, people.Length); + } + + [Test] + public void NullCheckOnClassTypes() + { + var people = Collection.Linq().Where(x => x.LinkedId == null).ToArray(); + + Assert.AreEqual(3, people.Length); + } + + [Test] + public void OrderBy() + { + var people = Collection.Linq().OrderBy(x => x.Age).ThenByDescending(x => x.LastName).ToList(); + + Assert.AreEqual("Joe", people[0].FirstName); + Assert.AreEqual("Jane", people[1].FirstName); + Assert.AreEqual("Bob", people[2].FirstName); + } + + [Test] + public void Projection() + { + var people = (from p in Collection.Linq() + select new { Name = p.FirstName + p.LastName }).ToList(); + + Assert.AreEqual(3, people.Count); + } + + [Test] + public void ProjectionWithLocalCreation_ChildobjectShouldNotBeNull() + { + var people = Collection.Linq() + .Select(p => new PersonWrapper(p, p.FirstName)) + .FirstOrDefault(); + + Assert.IsNotNull(people); + Assert.IsNotNull(people.Name); + Assert.IsNotNull(people.Person); + Assert.IsNotNull(people.Person.PrimaryAddress); + } + + [Test] + public void ProjectionWithConstraints() + { + var people = (from p in Collection.Linq() + where p.Age > 21 && p.Age < 42 + select new { Name = p.FirstName + p.LastName }).ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void Regex_IsMatch() + { + var people = (from p in Collection.Linq() + where Regex.IsMatch(p.FirstName, "Joe") + select p).ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void Regex_IsMatch_CaseInsensitive() + { + var people = (from p in Collection.Linq() + where Regex.IsMatch(p.FirstName, "joe", 
RegexOptions.IgnoreCase) + select p).ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void Single() + { + var person = Collection.Linq().Where(x => x.Age == 21).Single(); + + Assert.AreEqual("Joe", person.FirstName); + } + + [Test] + public void SingleEqualConstraint() + { + var people = Collection.Linq().Where(p => "Joe" == p.FirstName).ToList(); + + Assert.AreEqual(1, people.Count); + } + + [Test] + public void SkipAndTake() + { + var people = Collection.Linq().OrderBy(x => x.Age).Skip(2).Take(1).ToList(); + + Assert.AreEqual("Bob", people[0].FirstName); + } + + [Test] + public void String_Contains() + { + var people = (from p in Collection.Linq() + where p.FirstName.Contains("o") + select p).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void String_EndsWith() + { + var people = (from p in Collection.Linq() + where p.FirstName.EndsWith("e") + select p).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void String_StartsWith() + { + var people = (from p in Collection.Linq() + where p.FirstName.StartsWith("J") + select p).ToList(); + + Assert.AreEqual(2, people.Count); + } + + [Test] + public void WithoutConstraints() + { + var people = Collection.Linq().ToList(); + + Assert.AreEqual(3, people.Count); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/SecondServer/TestAuthentication.cs b/source/MongoDB.Tests/IntegrationTests/SecondServer/TestAuthentication.cs new file mode 100644 index 00000000..3208cce4 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/SecondServer/TestAuthentication.cs @@ -0,0 +1,132 @@ +using System; +using System.Configuration; +using MongoDB.Connections; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests.SecondServer +{ + /// + /// In case clean up fails open a Mongo shell and execute the following commands + /// * use admin + /// * db.auth("adminuser", "admin1234"); + /// * db.system.users.find(); //should see adminuser + /// * db.system.users.remove({user:"adminuser"}); + /// * db.system.users.find(); //should not see adminuser or any other. + /// * Tests should now run. 
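    /// * Alternatively, when the fixture can still authenticate as adminuser, the
    /// TestFixtureTearDown below performs the same cleanup through the driver, e.g.
    /// mongo["admin"].Metadata.RemoveUser(AdminUser).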
+ /// + [Ignore("Run manually since it needs a second server with --auth")] + [TestFixture] + public class TestAuthentication + { + private readonly string _connectionString; + + public TestAuthentication() + { + _connectionString = ConfigurationManager.AppSettings["auth"]; + } + + const String TestDatabaseName = "testAuth"; + const String TestUser = "testuser"; + const String TestPass = "test1234"; + + const String AdminUser = "adminuser"; + const String AdminPass = "admin1234"; + + [TestFixtureSetUp] + public void SetUp() + { + using(var mongo = new Mongo(_connectionString)) + { + mongo.Connect(); + + var testDatabase = mongo[TestDatabaseName]; + if(testDatabase.Metadata.FindUser(TestUser) == null) + testDatabase.Metadata.AddUser(TestUser, TestPass); + + var adminDatabase = mongo["admin"]; + if(adminDatabase.Metadata.FindUser(AdminUser) == null) + adminDatabase.Metadata.AddUser(AdminUser, AdminPass); + } + } + + [Test] + public void TestLoginGoodPassword() + { + using(var mongo = ConnectAndAuthenticatedMongo(TestUser, TestPass)) + TryInsertData(mongo); + } + + [Test] + [ExpectedException(typeof(MongoException))] + public void TestLoginBadPassword() + { + using(var mongo = ConnectAndAuthenticatedMongo(TestUser, "badpassword")) + TryInsertData(mongo); + } + + [Test] + public void TestAuthenticatedInsert(){ + using(var mongo = ConnectAndAuthenticatedMongo(TestUser, TestPass)) + TryInsertData(mongo); + } + + [Test] + [ExpectedException(typeof(MongoOperationException))] + public void TestUnauthenticatedInsert(){ + using(var mongo = new Mongo(_connectionString)) + { + mongo.Connect(); + + TryInsertData(mongo); + } + } + + [Test] + [ExpectedException(typeof(MongoException))] + public void TryUnautenticatedInsertWithoutSendingACommand() + { + using(var mongo = ConnectAndAuthenticatedMongo("noexisting", "noexisting")) + { + mongo.Connect(); + + var collection = mongo[TestDatabaseName]["testCollection"]; + collection.Insert(new Document().Add("value", 84), false); + } + } + + private Mongo ConnectAndAuthenticatedMongo(string username,string password) + { + var builder = new MongoConnectionStringBuilder(_connectionString) + { + Username = username, + Password = password + }; + var mongo = new Mongo(builder.ToString()); + mongo.Connect(); + return mongo; + } + + private static void TryInsertData(Mongo mongo) + { + var collection = mongo[TestDatabaseName]["testCollection"]; + collection.Delete(new Document(),true); + collection.Insert(new Document().Add("value", 84),true); + + var value = collection.FindOne(new Document().Add("value", 84)); + + Assert.AreEqual(84, value["value"]); + } + + [TestFixtureTearDown] + public void TestTearDown(){ + using(var mongo = ConnectAndAuthenticatedMongo(AdminUser, AdminPass)) + { + mongo[TestDatabaseName].Metadata.RemoveUser(TestUser); + mongo["admin"].Metadata.RemoveUser(AdminUser); + } + + // clean connections + ConnectionFactoryFactory.Shutdown(); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/SecondServer/TestPooledConnectionFactory.cs b/source/MongoDB.Tests/IntegrationTests/SecondServer/TestPooledConnectionFactory.cs new file mode 100644 index 00000000..f0094473 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/SecondServer/TestPooledConnectionFactory.cs @@ -0,0 +1,38 @@ +using MongoDB.Connections; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests.SecondServer +{ + [Ignore("Run manually since it needs a second server")] + [TestFixture] + public class TestPooledConnectionFactory + { + 
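        // This fixture hosts TestServerCirculationWorks, which this patch removes from the main
        // pooled-connection tests: it needs a second mongod listening on localhost:27018 and
        // verifies that the pool hands out connections round-robin across the servers named in
        // the connection string.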
[TestFixtureSetUp] + public void SetUp() + { + ConnectionFactoryFactory.Shutdown(); + } + + [Test] + public void TestServerCirculationWorks() + { + var builder = new MongoConnectionStringBuilder(); + builder.AddServer("localhost", 27017); + builder.AddServer("localhost", 27018); + using(var pool = new PooledConnectionFactory(builder.ToString())) + { + var connection1 = pool.Open(); + var connection2 = pool.Open(); + var connection3 = pool.Open(); + var connection4 = pool.Open(); + var connection5 = pool.Open(); + Assert.AreEqual(27017, connection1.EndPoint.Port); + Assert.AreEqual(27018, connection2.EndPoint.Port); + Assert.AreEqual(27017, connection3.EndPoint.Port); + Assert.AreEqual(27018, connection4.EndPoint.Port); + Assert.AreEqual(27017, connection5.EndPoint.Port); + } + } + + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/TestCollection.cs b/source/MongoDB.Tests/IntegrationTests/TestCollection.cs new file mode 100644 index 00000000..3aa6da98 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/TestCollection.cs @@ -0,0 +1,555 @@ +using System; +using System.Linq; +using MongoDB.Util; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests +{ + [TestFixture] + public class TestCollection : MongoTestBase + { + const string POUND = "\u00a3"; + + public override string TestCollections + { + get { return "inserts,updates,counts,counts_spec,finds,charreads,saves,find_and_modify"; } + } + + public override void OnInit() + { + var finds = DB["finds"]; + for(var j = 1; j < 100; j++) + finds.Insert(new Document {{"x", 4}, {"h", "hi"}, {"j", j}}); + for(var j = 100; j < 105; j++) + finds.Insert(new Document {{"x", 4}, {"n", 1}, {"j", j}}); + var charreads = DB["charreads"]; + charreads.Insert(new Document {{"test", "1234" + POUND + "56"}}); + } + + private static int CountDocs(ICursor cur) + { + return cur.Documents.Count(); + } + + [Test] + public void TestArrayInsert() + { + var inserts = DB["inserts"]; + var indoc1 = new Document(); + indoc1["song"] = "The Axe"; + indoc1["artist"] = "Tinsley Ellis"; + indoc1["year"] = 2006; + + var indoc2 = new Document(); + indoc2["song"] = "The Axe2"; + indoc2["artist"] = "Tinsley Ellis2"; + indoc2["year"] = 2008; + + inserts.Insert(new[] {indoc1, indoc2}); + + var result = inserts.FindOne(new Document().Add("song", "The Axe")); + Assert.IsNotNull(result); + Assert.AreEqual(2006, result["year"]); + + result = inserts.FindOne(new Document().Add("song", "The Axe2")); + Assert.IsNotNull(result); + Assert.AreEqual(2008, result["year"]); + } + + [Test] + public void TestCount() + { + var counts = DB["counts"]; + var top = 100; + for(var i = 0; i < top; i++) + counts.Insert(new Document().Add("Last", "Cordr").Add("First", "Sam").Add("cnt", i)); + var cnt = counts.Count(); + Assert.AreEqual(top, cnt, "Count not the same as number of inserted records"); + } + + [Test] + public void TestCountInvalidCollection() + { + var counts = DB["counts_wtf"]; + Assert.AreEqual(0, counts.Count()); + } + + [Test] + public void TestCountWithSpec() + { + var counts = DB["counts_spec"]; + counts.Insert(new Document().Add("Last", "Cordr").Add("First", "Sam").Add("cnt", 1)); + counts.Insert(new Document().Add("Last", "Cordr").Add("First", "Sam").Add("cnt", 2)); + counts.Insert(new Document().Add("Last", "Corder").Add("First", "Sam").Add("cnt", 3)); + + Assert.AreEqual(2, counts.Count(new Document().Add("Last", "Cordr"))); + Assert.AreEqual(1, counts.Count(new Document().Add("Last", "Corder"))); + Assert.AreEqual(0, counts.Count(new 
Document().Add("Last", "Brown"))); + } + + [Test] + public void TestDelete() + { + var deletes = DB["deletes"]; + var doc = new Document(); + doc["y"] = 1; + doc["x"] = 2; + deletes.Insert(doc); + + var selector = new Document().Add("x", 2); + + var result = deletes.FindOne(selector); + Assert.IsNotNull(result); + Assert.AreEqual(1, result["y"]); + + deletes.Delete(selector); + result = deletes.FindOne(selector); + Assert.IsNull(result, "Shouldn't have been able to find a document that was deleted"); + } + + [Test] + public void TestFindAttributeLimit() + { + var query = new Document(); + query["j"] = 10; + var fields = new Document(); + fields["x"] = 1; + + var c = DB["finds"].Find(query, -1, 0, fields); + foreach(var result in c.Documents) + { + Assert.IsNotNull(result); + Assert.AreEqual(4, result["x"]); + Assert.IsNull(result["j"]); + } + } + + [Test] + public void TestFindGTRange() + { + var query = new Document(); + query["j"] = new Document().Add("$gt", 20); + + var c = DB["finds"].Find(query); + foreach(var result in c.Documents) + { + Assert.IsNotNull(result); + var j = result["j"]; + Assert.IsTrue(Convert.ToDouble(j) > 20); + } + } + + [Test] + public void TestFindNulls() + { + var query = new Document().Add("n", null); + var numnulls = DB["finds"].Count(query); + Assert.AreEqual(99, numnulls); + } + + [Test] + public void TestFindOne() + { + var query = new Document(); + query["j"] = 10; + var result = DB["finds"].FindOne(query); + Assert.IsNotNull(result); + Assert.AreEqual(4, result["x"]); + Assert.AreEqual(10, result["j"]); + } + + [Test] + public void TestFindOneNotThere() + { + var query = new Document(); + query["not_there"] = 10; + var result = DB["finds"].FindOne(query); + Assert.IsNull(result); + } + + [Test] + public void TestFindOneObjectContainingUKPound() + { + var query = new Document(); + var result = DB["charreads"].FindOne(query); + Assert.IsNotNull(result); + Assert.IsTrue(result.ContainsKey("test")); + Assert.AreEqual("1234£56", result["test"]); + } + + [Test] + public void TestFindWhereEquivalency() + { + var col = DB["finds"]; + var lt = new Document().Add("j", new Document().Add("$lt", 5)); + var where = "this.j < 5"; + var explicitWhere = new Document().Add("$where", new Code(where)); + var func = new CodeWScope("function() { return this.j < 5; }", new Document()); + var funcDoc = new Document().Add("$where", func); + + Assert.AreEqual(4, CountDocs(col.Find(lt)), "Basic find didn't return 4 docs"); + Assert.AreEqual(4, CountDocs(col.Find(where)), "String where didn't return 4 docs"); + Assert.AreEqual(4, CountDocs(col.Find(explicitWhere)), "Explicit where didn't return 4 docs"); + Assert.AreEqual(4, CountDocs(col.Find(funcDoc)), "Function where didn't return 4 docs"); + } + + [Test] + public void TestFindAndModifyReturnsOldDocument() { + IMongoCollection collection = DB["find_and_modify"]; + Document person = new Document().Append("First", "Sally").Append("Last", "Simmons"); + collection.Insert(person); + + Document spec = new Document().Append("_id", person["_id"]); + Document loaded = collection.FindAndModify(new Document().Append("First", "Jane"), spec); + + Assert.AreEqual("Sally", loaded["First"]); + } + + [Test] + public void TestFindAndModifyReturnsNewDocument() { + IMongoCollection collection = DB["find_and_modify"]; + Document person = new Document().Append("First", "Susie").Append("Last", "O'Hara"); + collection.Insert(person); + + Document spec = new Document().Append("_id", person["_id"]); + Document loaded = collection.FindAndModify(new 
Document().Append("First", "Darlene"), spec, true); + + Assert.AreEqual("Darlene", loaded["First"]); + } + + [Test] + public void TestFindAndModifySortsResults() { + IMongoCollection collection = DB["find_and_modify"]; + Document doc1 = new Document().Append("handled", false).Append("priority", 1).Append("value", "Test 1"); + Document doc2 = new Document().Append("handled", false).Append("priority", 2).Append("value", "Test 2"); + collection.Insert(doc1); + collection.Insert(doc2); + + Document update = new Document().Append("handled", true); + Document spec = new Document().Append("handled", false); + Document sort = new Document().Append("priority", -1); + Document loaded = collection.FindAndModify(update, spec, sort, true); + + Assert.AreEqual(true, loaded["handled"]); + Assert.AreEqual(doc2["priority"], loaded["priority"]); + Assert.AreEqual(doc2["value"], loaded["value"]); + } + + [Test] + public void TestFindAndModifyReturnNullForNoRecordFound() { + IMongoCollection collection = DB["find_and_modify"]; + Document spec = new Document().Append("FirstName", "Noone"); + Document loaded = collection.FindAndModify(new Document().Append("First", "Darlene"), spec, true); + + Assert.IsNull(loaded, "Should return null for no document found"); + } + + [Test] + public void TestInsertBulkLargerThan4MBOfDocuments() + { + var b = new Binary(new byte[1024*1024*2]); + var inserts = DB["inserts"]; + try + { + var docs = new Document[10]; + //6MB+ of documents + for(var x = 0; x < docs.Length; x++) + docs[x] = new Document {{"name", "bulk"}, {"b", b}, {"x", x}}; + inserts.Insert(docs, true); + var count = inserts.Count(new Document {{"name", "bulk"}}); + Assert.AreEqual(docs.Length, count, "Wrong number of documents inserted"); + } + catch(MongoException) + { + Assert.Fail("MongoException should not have been thrown."); + } + } + + [Test] + public void TestInsertLargerThan4MBDocument() + { + var b = new Binary(new byte[1024*1024]); + var big = new Document {{"name", "Big Document"}, {"b1", b}, {"b2", b}, {"b3", b}, {"b4", b}}; + var inserts = DB["inserts"]; + var thrown = false; + try + { + inserts.Insert(big); + } + catch(MongoException) + { + thrown = true; + } + catch(Exception e) + { + Assert.Fail("Wrong Exception thrown " + e.GetType().Name); + } + Assert.IsTrue(thrown, "Shouldn't be able to insert large document"); + } + + [Test] + public void TestInsertOfArray() + { + var ogen = new OidGenerator(); + var inserts = DB["inserts"]; + var album = new Document(); + album["_id"] = ogen.Generate(); + album["artist"] = "Popa Chubby"; + album["title"] = "Deliveries After Dark"; + album["songs"] = new[] + { + new Document().Add("title", "Let The Music Set You Free").Add("length", "5:15").Add("_id", ogen.Generate()), + new Document().Add("title", "Sally Likes to Run").Add("length", "4:06").Add("_id", ogen.Generate()), + new Document().Add("title", "Deliveries After Dark").Add("length", "4:17").Add("_id", ogen.Generate()), + new Document().Add("title", "Theme From The Godfather").Add("length", "3:06").Add("_id", ogen.Generate()), + new Document().Add("title", "Grown Man Crying Blues").Add("length", "8:09").Add("_id", ogen.Generate()), + }; + inserts.Insert(album); + + var result = inserts.FindOne(new Document().Add("songs.title", "Deliveries After Dark")); + Assert.IsNotNull(result); + + Assert.AreEqual(album.ToString(), result.ToString()); + } + + [Test] + public void TestManualWhere() + { + var query = new Document().Add("$where", new Code("this.j % 2 == 0")); + var c = DB["finds"].Find(query); + 
foreach(var result in c.Documents) + { + Assert.IsNotNull(result); + var j = result["j"]; + Assert.IsTrue(Convert.ToInt32(j)%2 == 0); + } + } + + [Test] + public void TestPoundSymbolInsert() + { + var inserts = DB["inserts"]; + var indoc = new Document().Add("x", "1234" + POUND + "56").Add("y", 1); + inserts.Insert(indoc); + + var result = inserts.FindOne(new Document().Add("x", "1234" + POUND + "56")); + Assert.IsNotNull(result); + Assert.AreEqual(1, result["y"]); + } + + [Test] + public void TestReallySimpleInsert() + { + var inserts = DB["inserts"]; + var indoc = new Document(); + indoc["y"] = 1; + indoc["x"] = 2; + inserts.Insert(indoc); + + var result = inserts.FindOne(new Document().Add("x", 2)); + Assert.IsNotNull(result); + Assert.AreEqual(1, result["y"]); + } + + [Test] + public void TestSave() + { + var saves = DB["saves"]; + var count = 100; + for(var i = 0; i < count; i++) + saves.Save(new Document {{"x", i}, {"desc", "This document is number: " + i}, {"y", 1}}); + Assert.AreEqual(count, saves.Count(new Document {{"y", 1}})); + + using(var cur = saves.FindAll()) + { + foreach(var d in cur.Documents) + { + d["y"] = Convert.ToInt32(d["y"]) + 1; + saves.Save(d); + } + } + Assert.AreEqual(count, saves.Count(new Document {{"y", 2}})); + } + + [Test] + public void TestSaveInsertDocumentIfExists() + { + var saves = DB["updates"]; + saves.Delete(new Document()); + + var document1 = new Document("name", "Alien1"); + saves.Insert(document1); + var document2 = new Document("name", "Alien2"); + saves.Insert(document2); + + document1["name"] = "Sam"; + saves.Save(document1); + document2["name"] = "Steve"; + saves.Save(document2); + + var array = saves.FindAll().Documents.ToArray(); + Assert.AreEqual(2, array.Length); + Assert.AreEqual("Sam", array[0]["name"]); + Assert.AreEqual("Steve", array[1]["name"]); + } + + [Test] + public void TestSaveInsertDocumentIfNotExists() + { + var saves = DB["updates"]; + saves.Delete(new Document()); + + saves.Save(new Document("name", "Sam")); + saves.Save(new Document("name", "Steve")); + + var array = saves.FindAll().Documents.ToArray(); + Assert.AreEqual(2, array.Length); + Assert.AreEqual("Sam", array[0]["name"]); + Assert.AreEqual("Steve", array[1]["name"]); + } + + [Test] + public void TestSimpleInsert() + { + var inserts = DB["inserts"]; + var indoc = new Document(); + indoc["song"] = "Palmdale"; + indoc["artist"] = "Afroman"; + indoc["year"] = 1999; + + inserts.Insert(indoc); + + var result = inserts.FindOne(new Document().Add("song", "Palmdale")); + Assert.IsNotNull(result); + Assert.AreEqual(1999, result["year"]); + } + + [Test] + public void TestUpdateMany() + { + var updates = DB["updates"]; + + updates.Insert(new Document().Add("Last", "Cordr").Add("First", "Sam")); + updates.Insert(new Document().Add("Last", "Cordr").Add("First", "Sam2")); + updates.Insert(new Document().Add("Last", "Cordr").Add("First", "Sam3")); + + var selector = new Document().Add("Last", "Cordr"); + var results = updates.Find(selector); + var found = false; + foreach(var doc in results.Documents) + { + Assert.AreEqual("Cordr", doc["Last"]); + found = true; + } + Assert.IsTrue(found, "Should have found docs inserted for TestUpdateMany"); + Assert.AreEqual(3, updates.Count(selector), "Didn't find all Documents inserted for TestUpdateMany with Selector"); + + //Document updateData = new Document().Append("$set", new Document().Append("Last", "Corder2")); + var updateData = new Document().Add("Last", "Corder2"); + updates.UpdateAll(updateData, selector); + + selector["Last"] 
= "Corder2"; + Assert.AreEqual(3, updates.Count(selector), "Not all Cordr documents were updated"); + + results = updates.Find(selector); + found = false; + foreach(var doc in results.Documents) + { + Assert.AreEqual("Corder2", doc["Last"]); + Assert.IsNotNull(doc["First"], "First name should not disappear"); + found = true; + } + Assert.IsTrue(found, "Should have found docs updated for TestMany"); + } + + [Test] + public void TestUpdatePartial() + { + var updates = DB["updates"]; + var coolness = 5; + var einstein = new Document {{"Last", "Einstien"}, {"First", "Albert"}, {"Coolness", coolness++}}; + updates.Insert(einstein); + var selector = new Document {{"_id", einstein["_id"]}}; + + updates.Update(new Document {{"$inc", new Document {{"Coolness", 1}}}}, selector); + Assert.AreEqual(coolness++, Convert.ToInt32(updates.FindOne(selector)["Coolness"]), "Coolness field not incremented", true); + + updates.Update(new Document + { + {"$set", new Document {{"Last", "Einstein"}}}, + {"$inc", new Document {{"Coolness", 1}}} + }, + selector, + true); + Assert.AreEqual(coolness++, Convert.ToInt32(updates.FindOne(selector)["Coolness"]), "Coolness field not incremented"); + } + + [Test] + public void TestUpdateUpsertExisting() + { + var updates = DB["updates"]; + var doc = new Document(); + doc["First"] = "Mtt"; + doc["Last"] = "Brewer"; + + updates.Insert(doc); + + var selector = new Document().Add("Last", "Brewer"); + doc = updates.FindOne(selector); + Assert.IsNotNull(doc); + Assert.AreEqual("Mtt", doc["First"]); + Assert.IsNotNull(doc["_id"]); + + doc["First"] = "Matt"; + updates.Update(doc); + + var result = updates.FindOne(selector); + Assert.IsNotNull(result); + Assert.AreEqual("Matt", result["First"]); + } + + [Test] + public void TestUpdateUpsertNotExisting() + { + var updates = DB["updates"]; + var doc = new Document(); + doc["First"] = "Sam"; + doc["Last"] = "CorderNE"; + + updates.Update(doc); + var selector = new Document().Add("Last", "CorderNE"); + var result = updates.FindOne(selector); + Assert.IsNotNull(result); + Assert.AreEqual("Sam", result["First"]); + } + + [Test] + public void TestWhere() + { + var c = DB["finds"].Find("this.j % 2 == 0"); + foreach(var result in c.Documents) + { + Assert.IsNotNull(result); + var j = result["j"]; + Assert.IsTrue(Convert.ToInt32(j)%2 == 0); + } + } + + [Test] + public void TestHandlingRetrievalOfUnderscoredFieldsAfterFindAndModify() + { + var updates = DB["updates"]; + var id = Guid.NewGuid(); + var doc = new Document() + .Add("_id", id) + .Add("FirstName", "David") + .Add("LastName", "Beckham") + .Add("Embedded", new Document().Add("Types", 1)); + updates.Insert(doc); + var find = new Document().Add("_id", id); + var modify = new Document().Add("LastName", "Copperfield"); + var returned = updates.FindAndModify(modify, find, true); + Assert.IsInstanceOfType(typeof(Document), returned["Embedded"]); + } + } +} \ No newline at end of file diff --git a/MongoDB.Net-Tests/TestCollectionMetaData.cs b/source/MongoDB.Tests/IntegrationTests/TestCollectionMetaData.cs similarity index 77% rename from MongoDB.Net-Tests/TestCollectionMetaData.cs rename to source/MongoDB.Tests/IntegrationTests/TestCollectionMetaData.cs index 1e442e39..1f59e6a9 100644 --- a/MongoDB.Net-Tests/TestCollectionMetaData.cs +++ b/source/MongoDB.Tests/IntegrationTests/TestCollectionMetaData.cs @@ -1,105 +1,100 @@ -using System; -using System.Collections.Generic; - -using NUnit.Framework; -using MongoDB.Driver.Bson; - -namespace MongoDB.Driver -{ - [TestFixture] - public class 
TestCollectionMetaData : MongoTestBase - { - Database adminDb; - - String adminuser = "adminuser"; - String adminpass = "admin1234"; - - public override string TestCollections { - get { - return "indextests,rename,renamed"; - } - } - - public override void OnInit (){ - IMongoCollection its = DB["indextests"]; - its.Insert(createDoc("S","A","Anderson","OH")); - its.Insert(createDoc("T","B","Delhi","OH")); - its.Insert(createDoc("F","B","Cincinnati","OH")); - its.Insert(createDoc("U","D","Newtown","OH")); - its.Insert(createDoc("J","E","Newport","KY")); - - adminDb = DB.GetSisterDatabase("admin"); - //adminDb.MetaData.AddUser(adminuser, adminpass); - } - - public override void OnDispose (){ - //adminDb.MetaData.RemoveUser(adminuser); +using System; +using System.Collections.Generic; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests +{ + [TestFixture] + public class TestCollectionMetaData : MongoTestBase + { + MongoDatabase adminDb; + + public override string TestCollections { + get { + return "indextests,rename,renamed"; + } } - - [Test] - public void TestGetOptions(){ - CollectionMetaData cmd = DB["reads"].MetaData; - Document options = cmd.Options; - Assert.IsNotNull(options); - } - - [Test] - public void TestGetIndexes(){ - CollectionMetaData cmd = DB["indextests"].MetaData; - Dictionary indexes = cmd.Indexes; - - Assert.IsNotNull(indexes); - Assert.IsTrue(indexes.Count > 0, "Should have been at least one index found."); - foreach(string key in indexes.Keys){ - System.Console.WriteLine(String.Format("Key: {0} Value: {1}", key, indexes[key])); - } - } - - [Test] - public void TestCreateIndex(){ - CollectionMetaData cmd = DB["indextests"].MetaData; - cmd.CreateIndex("lastnames", new Document().Append("lname", IndexOrder.Ascending), false); - Dictionary indexes = cmd.Indexes; - Assert.IsNotNull(indexes["lastnames"]); - } - - [Test] - public void TestCreateIndexNoNames(){ - CollectionMetaData cmd = DB["indextests"].MetaData; - cmd.CreateIndex(new Document().Append("lname", IndexOrder.Ascending).Append("fname",IndexOrder.Ascending), true); - Dictionary indexes = cmd.Indexes; - Assert.IsNotNull(indexes["_lname_fname_unique_"]); - } - - [Test] - public void TestDropIndex(){ - CollectionMetaData cmd = DB["indextests"].MetaData; - cmd.CreateIndex("firstnames", new Document().Append("fname", IndexOrder.Ascending), false); - Dictionary indexes = cmd.Indexes; - Assert.IsNotNull(indexes["firstnames"]); - cmd.DropIndex("firstnames"); - Assert.IsFalse(cmd.Indexes.ContainsKey("firstnames")); - } - - [Test] + + public override void OnInit (){ + IMongoCollection its = DB["indextests"]; + its.Insert(createDoc("S","A","Anderson","OH")); + its.Insert(createDoc("T","B","Delhi","OH")); + its.Insert(createDoc("F","B","Cincinnati","OH")); + its.Insert(createDoc("U","D","Newtown","OH")); + its.Insert(createDoc("J","E","Newport","KY")); + + adminDb = DB.GetSisterDatabase("admin"); + //adminDb.MetaData.AddUser(adminuser, adminpass); + } + + public override void OnDispose (){ + //adminDb.MetaData.RemoveUser(adminuser); + } + + [Test] + public void TestGetOptions(){ + CollectionMetadata cmd = DB["reads"].Metadata; + Document options = cmd.Options; + Assert.IsNotNull(options); + } + + [Test] + public void TestGetIndexes(){ + CollectionMetadata cmd = DB["indextests"].Metadata; + Dictionary indexes = cmd.Indexes; + + Assert.IsNotNull(indexes); + Assert.IsTrue(indexes.Count > 0, "Should have been at least one index found."); + foreach(string key in indexes.Keys){ + System.Console.WriteLine(String.Format("Key: 
{0} Value: {1}", key, indexes[key])); + } + } + + [Test] + public void TestCreateIndex(){ + CollectionMetadata cmd = DB["indextests"].Metadata; + cmd.CreateIndex("lastnames", new Document().Add("lname", IndexOrder.Ascending), false); + Dictionary indexes = cmd.Indexes; + Assert.IsNotNull(indexes["lastnames"]); + } + + [Test] + public void TestCreateIndexNoNames(){ + CollectionMetadata cmd = DB["indextests"].Metadata; + cmd.CreateIndex(new Document().Add("lname", IndexOrder.Ascending).Add("fname", IndexOrder.Ascending), true); + Dictionary indexes = cmd.Indexes; + Assert.IsNotNull(indexes["_lname_fname_unique_"]); + } + + [Test] + public void TestDropIndex(){ + CollectionMetadata cmd = DB["indextests"].Metadata; + cmd.CreateIndex("firstnames", new Document().Add("fname", IndexOrder.Ascending), false); + Dictionary indexes = cmd.Indexes; + Assert.IsNotNull(indexes["firstnames"]); + cmd.DropIndex("firstnames"); + Assert.IsFalse(cmd.Indexes.ContainsKey("firstnames")); + } + + [Test] public void TestRename(){ - DB["rename"].Insert(new Document(){{"test", "rename"}}); + DB["rename"].Insert(new Document(){{"test", "rename"}}); Assert.AreEqual(1, DB["rename"].Count()); - CollectionMetaData cmd = DB["rename"].MetaData; - cmd.Rename("renamed"); - Assert.IsFalse(DB.GetCollectionNames().Contains(DB.Name + ".rename"), "Shouldn't have found collection"); - Assert.IsTrue(DB.GetCollectionNames().Contains(DB.Name + ".renamed"),"Should have found collection"); + CollectionMetadata cmd = DB["rename"].Metadata; + cmd.Rename("renamed"); + Assert.IsFalse(DB.GetCollectionNames().Contains(DB.Name + ".rename"), "Shouldn't have found collection"); + Assert.IsTrue(DB.GetCollectionNames().Contains(DB.Name + ".renamed"),"Should have found collection"); Assert.AreEqual(1, DB["renamed"].Count()); - } - - protected Document createDoc(string fname, string lname, string city, string state){ - Document doc = new Document(); - doc["fname"] = fname; - doc["lname"] = lname; - doc["city"] = city; - doc["state"] = state; - return doc; - } - - } -} + } + + protected Document createDoc(string fname, string lname, string city, string state){ + Document doc = new Document(); + doc["fname"] = fname; + doc["lname"] = lname; + doc["city"] = city; + doc["state"] = state; + return doc; + } + + } +} diff --git a/MongoDB.Net-Tests/TestCollectionSafeMode.cs b/source/MongoDB.Tests/IntegrationTests/TestCollectionSafeMode.cs similarity index 93% rename from MongoDB.Net-Tests/TestCollectionSafeMode.cs rename to source/MongoDB.Tests/IntegrationTests/TestCollectionSafeMode.cs index b0da9044..de19bf33 100644 --- a/MongoDB.Net-Tests/TestCollectionSafeMode.cs +++ b/source/MongoDB.Tests/IntegrationTests/TestCollectionSafeMode.cs @@ -1,9 +1,7 @@ -using System; - -using NUnit.Framework; - - -namespace MongoDB.Driver +using System; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests { [TestFixture] public class TestCollectionSafeMode : MongoTestBase @@ -13,11 +11,9 @@ public override string TestCollections { return "safeinsert, safeupdate, safedelete, safemupdate"; } } - - [Test] - public void TestBadInsert(){ + public void TestBadInsert(){ IMongoCollection col = InitCollection("safeinsert"); bool thrown = false; try{ @@ -31,7 +27,7 @@ public void TestBadInsert(){ } [Test] - public void TestBadUpdate(){ + public void TestBadUpdate(){ IMongoCollection col = InitCollection("safeupdate"); bool thrown = false; try{ @@ -48,7 +44,7 @@ public void TestBadUpdate(){ } [Test] - public void TestMultiUpdate(){ + public void TestMultiUpdate(){ 
IMongoCollection col = InitCollection("safemupdate"); Document newy = new Document(){{"y", 2}}; col.UpdateAll(newy, new Document(){{"y",1}},true); @@ -66,11 +62,12 @@ public void TestMultiUpdate(){ Assert.Fail(String.Format("Wrong exception thrown: {0}", e.GetType().Name)); } Assert.IsTrue(thrown, "Exception not thrown."); - } - - protected IMongoCollection InitCollection(string name){ + } + + protected IMongoCollection InitCollection(string name) + { IMongoCollection col = DB[name]; - col.MetaData.CreateIndex(new Document{{"x", IndexOrder.Ascending}}, true); + col.Metadata.CreateIndex(new Document{{"x", IndexOrder.Ascending}}, true); for(int x = 0; x < 5; x++){ col.Insert(new Document{{"x", x}, {"y", 1}}); } diff --git a/source/MongoDB.Tests/IntegrationTests/TestCollection_1.cs b/source/MongoDB.Tests/IntegrationTests/TestCollection_1.cs new file mode 100644 index 00000000..3df6b1d3 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/TestCollection_1.cs @@ -0,0 +1,412 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Attributes; +using MongoDB.Util; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests +{ + [TestFixture] + public class TestCollection_1 : MongoTestBase + { + private const string POUND = "\u00a3"; + + private class CountsEntity + { + public Oid Id { get; set; } + + public string Last { get; set; } + + public string First { get; set; } + + [MongoAlias("cnt")] + public int Coolness { get; set; } + } + + private class FindsEntity + { + public int x { get; set; } + + [MongoAlias("h")] + public string Text { get; set; } + + [MongoAlias("j")] + public int Index { get; set; } + + public int n { get; set; } + } + + private class CharReadsEntity + { + public string test { get; set; } + } + + private class InsertsEntity + { + [MongoAlias("song")] + public string Song { get; set; } + + [MongoAlias("artist")] + public string Artist { get; set; } + + [MongoAlias("year")] + public int Year { get; set; } + } + + private class Album + { + [MongoAlias("artist")] + public string Artist { get; set; } + + [MongoAlias("title")] + public string Title { get; set; } + + [MongoAlias("songs")] + public List Songs { get; set; } + } + + private class Song + { + [MongoAlias("title")] + public string Title { get; set; } + + [MongoAlias("length")] + public string Length { get; set; } + } + + private class AlbumCase + { + public AlbumCase(){ + Album = new Album(); + } + + public Album Album { get; set; } + } + + private class DeletesEntity + { + public int x { get; set; } + public int y { get; set; } + } + + public override string TestCollections{ + get { return "inserts,updates,counts,counts_spec,finds,charreads,saves"; } + } + + public override void OnInit(){ + var finds = DB["finds"]; + for(var j = 1; j < 100; j++) + finds.Insert(new Document {{"x", 4}, {"h", "hi"}, {"j", j}}); + for(var j = 100; j < 105; j++) + finds.Insert(new Document {{"x", 4}, {"n", 1}, {"j", j}}); + var charreads = DB["charreads"]; + charreads.Insert(new Document {{"test", "1234" + POUND + "56"}}); + } + + [Test] + public void TestArrayInsert(){ + var inserts = DB.GetCollection("inserts"); + var indoc1 = new {Song = "The Axe", Artist = "Tinsley Ellis", Year = 2006}; + var indoc2 = new {Song = "The Axe2", Artist = "Tinsley Ellis2", Year = 2008}; + + inserts.Insert(new[] {indoc1, indoc2}); + + var result = inserts.FindOne(new Document().Add("Song", "The Axe")); + Assert.IsNotNull(result); + Assert.AreEqual(2006, result.Year); + + result = inserts.FindOne(new Document().Add("Song", 
"The Axe2")); + Assert.IsNotNull(result); + Assert.AreEqual(2008, result.Year); + } + + [Test] + public void TestCanInsertNullPropertys(){ + var inserts = DB.GetCollection("inserts"); + + inserts.Insert(new CharReadsEntity()); + } + + [Test] + public void TestCount(){ + var counts = DB.GetCollection("counts"); + var top = 100; + for(var i = 0; i < top; i++) + counts.Insert(new CountsEntity {Last = "Cordr", First = "Sam", Coolness = i}); + var cnt = counts.Count(); + Assert.AreEqual(top, cnt, "Count not the same as number of inserted records"); + } + + [Test] + public void TestCountInvalidCollection(){ + var counts = DB.GetCollection("counts_wtf"); + Assert.AreEqual(0, counts.Count()); + } + + [Test] + public void TestCountWithSpec(){ + var counts = DB.GetCollection("counts_spec"); + counts.Insert(new CountsEntity {Last = "Cordr", First = "Sam", Coolness = 1}); + counts.Insert(new CountsEntity {Last = "Cordr", First = "Sam", Coolness = 2}); + counts.Insert(new CountsEntity {Last = "Corder", First = "Sam", Coolness = 3}); + + Assert.AreEqual(2, counts.Count(new {Last = "Cordr"})); + Assert.AreEqual(1, counts.Count(new {Last = "Corder"})); + Assert.AreEqual(0, counts.Count(new {Last = "Brown"})); + } + + [Test] + public void TestDelete(){ + var deletes = DB.GetCollection("deletes"); + deletes.Insert(new {x = 2, y = 1}); + + var selector = new {x = 2}; + + var result = deletes.FindOne(selector); + Assert.IsNotNull(result); + Assert.AreEqual(1, result.y); + + deletes.Delete(selector); + result = deletes.FindOne(selector); + Assert.IsNull(result, "Shouldn't have been able to find a document that was deleted"); + } + + [Test] + public void TestFindAttributeLimit(){ + var query = new {Index = 10}; + var fields = new {x = 1}; + var c = DB.GetCollection("finds").Find(query, -1, 0, fields); + foreach(var result in c.Documents) + { + Assert.IsNotNull(result); + Assert.AreEqual(4, result.x); + Assert.AreEqual(0, result.Index); + } + } + + [Test] + public void TestFindGTRange(){ + var query = new {Index = Op.GreaterThan(20)}; + var c = DB.GetCollection("finds").Find(query); + foreach(var result in c.Documents) + { + Assert.IsNotNull(result); + Assert.Greater(result.Index, 20); + } + } + + [Test] + public void TestFindNulls(){ + var query = new {Text = (string)null}; + var numnulls = DB.GetCollection("finds").Count(query); + Assert.AreEqual(5, numnulls); + } + + [Test] + public void TestFindOne(){ + var query = new {Index = 10}; + var result = DB.GetCollection("finds").FindOne(query); + Assert.IsNotNull(result); + Assert.AreEqual(4, result.x); + Assert.AreEqual(10, result.Index); + } + + [Test] + public void TestFindOneNotThere(){ + var query = new {not_there = 10}; + var result = DB.GetCollection("finds").FindOne(query); + Assert.IsNull(result); + } + + [Test] + public void TestFindOneObjectContainingUKPound(){ + var query = new Document(); + var result = DB.GetCollection("charreads").FindOne(query); + Assert.IsNotNull(result); + Assert.AreEqual("1234£56", result.test); + } + + [Test] + public void TestFindWhereEquivalency(){ + var col = DB.GetCollection("finds"); + var lt = new {Index = Op.LessThan(5)}; + var where = "this.j < 5"; + var explicitWhere = new Document().Add("$where", new Code(where)); + var func = new CodeWScope("function() { return this.j < 5; }", new Document()); + var funcDoc = new Document().Add("$where", func); + + Assert.AreEqual(4, col.Find(lt).Documents.Count(), "Basic find didn't return 4 docs"); + Assert.AreEqual(4, col.Find(where).Documents.Count(), "String where didn't return 
4 docs"); + Assert.AreEqual(4, col.Find(explicitWhere).Documents.Count(), "Explicit where didn't return 4 docs"); + Assert.AreEqual(4, col.Find(funcDoc).Documents.Count(), "Function where didn't return 4 docs"); + } + + [Test] + public void TestInsertBulkLargerThan4MBOfDocuments(){ + var b = new Binary(new byte[1024*1024*2]); + var inserts = DB.GetCollection("inserts"); + try + { + //6MB+ of documents + var docs = from i in Enumerable.Range(1, 10) + select new {Song = "Bulk", bin = b, Year = i}; + + inserts.Insert(docs, true); + var count = inserts.Count(new Document("Song", "Bulk")); + Assert.AreEqual(docs.Count(), count, "Wrong number of documents inserted"); + } + catch(MongoException) + { + Assert.Fail("MongoException should not have been thrown."); + } + } + + [Test] + public void TestInsertOfArray(){ + var ogen = new OidGenerator(); + var inserts = DB.GetCollection("inserts"); + var album = new Album {Title = "Deliveries After Dark", Artist = "Popa Chubby"}; + album.Songs = new List + { + new Song {Title = "Let The Music Set You Free", Length = "5:15"}, + new Song {Title = "Sally Likes to Run", Length = "4:06"}, + new Song {Title = "Deliveries After Dark", Length = "4:17"}, + new Song {Title = "Theme From The Godfather", Length = "3:06"}, + new Song {Title = "Grown Man Crying Blues", Length = "8:09"} + }; + inserts.Insert(album); + + var result = inserts.FindOne(new Document().Add("Songs.Title", "Deliveries After Dark")); + Assert.IsNotNull(result); + + Assert.AreEqual(album.Songs.Count, result.Songs.Count); + } + + [Test] + public void TestSimpleInsert(){ + var inserts = DB.GetCollection("inserts"); + var indoc = new InsertsEntity {Artist = "Afroman", Song = "Palmdale", Year = 1999}; + inserts.Insert(indoc); + + var result = inserts.FindOne(new {Song = "Palmdale"}); + Assert.IsNotNull(result); + Assert.AreEqual(indoc.Year, result.Year); + } + + [Test] + public void TestUpdateMany(){ + var updates = DB.GetCollection("updates"); + + updates.Insert(new CountsEntity {Last = "Cordr", First = "Sam"}); + updates.Insert(new CountsEntity {Last = "Cordr", First = "Sam2"}); + updates.Insert(new CountsEntity {Last = "Cordr", First = "Sam3"}); + + var selector = new {Last = "Cordr"}; + var results = updates.Find(selector); + Assert.AreEqual(3, results.Documents.Count(), "Didn't find all Documents inserted for TestUpdateMany with Selector"); + + var updateData = new {Last = "Cordr2"}; + updates.UpdateAll(updateData, selector); + + selector = new {Last = "Cordr2"}; + results = updates.Find(selector); + var count = 0; + foreach(var doc in results.Documents) + { + count++; + Assert.AreEqual("Cordr2", doc.Last); + Assert.IsNotNull(doc.First, "First name should not disappear"); + } + + Assert.AreEqual(3, count, "Didn't find all documents for updated."); + } + + [Test] + public void TestUpdatePartial(){ + var updates = DB.GetCollection("updates"); + var coolness = 5; + var einstein = new CountsEntity {Last = "Einstein", First = "Albret", Coolness = coolness++}; + updates.Insert(einstein); + var selector = new {Last = "Einstein"}; + + updates.Update(new Document {{"$inc", new Document("cnt", 1)}}, selector); + Assert.AreEqual(coolness++, Convert.ToInt32(updates.FindOne(selector).Coolness), "Coolness field not incremented", true); + + updates.Update(new Document + { + {"$set", new {First = "Albert"}}, + {"$inc", new Document {{"cnt", 1}}} + }, + selector, + true); + Assert.AreEqual(coolness++, Convert.ToInt32(updates.FindOne(selector).Coolness), "Coolness field not incremented"); + } + + [Test] + public 
void TestUpdateUpsertExisting(){ + var updates = DB.GetCollection("updates"); + var doc = new CountsEntity {First = "Mtt", Last = "Brewer"}; + + updates.Insert(doc); + + var selector = new {Last = "Brewer"}; + doc = updates.FindOne(selector); + Assert.IsNotNull(doc); + Assert.AreEqual("Mtt", doc.First); + Assert.IsNotNull(doc.Id); + + doc.First = "Matt"; + updates.Update(doc); + + var result = updates.FindOne(selector); + Assert.IsNotNull(result); + Assert.AreEqual("Matt", result.First); + } + + [Test] + public void TestUpdateUpsertNotExisting(){ + var updates = DB.GetCollection("updates"); + var doc = new CountsEntity {First = "Sam", Last = "CorderNE"}; + + updates.Update(doc); + var result = updates.FindOne(new {Last = "CorderNE"}); + Assert.IsNotNull(result); + Assert.AreEqual("Sam", result.First); + } + + [Test] + public void CanSaveNewDocumentsWithoutId(){ + var saves = DB.GetCollection("saves"); + saves.Save(new Document("WithoutId", 1.0)); + + var result = saves.FindOne(new Document("WithoutId",1.0)); + Assert.IsNotNull(result); + } + + [Test] + public void CanSaveNewDocumentWithId(){ + var saves = DB.GetCollection("saves"); + saves.Save(new Document("WithId", 1.0).Add("_id", 5)); + + var result = saves.FindOne(new Document("_id", 5)); + Assert.IsNotNull(result); + Assert.AreEqual(result.Id,5); + } + + [Test] + public void SaveUpdatesExistsingDocument(){ + var saves = DB.GetCollection("saves"); + var updated = new Document("Existing", 1.0); + saves.Insert(updated); + + updated["Existing"] = 2.0; + + saves.Save(updated); + + var result = saves.FindOne(new Document("_id", updated.Id)); + Assert.IsNotNull(result); + Assert.AreEqual(result["Existing"], 2.0); + } + } +} \ No newline at end of file diff --git a/MongoDB.Net-Tests/TestConcurrency.cs b/source/MongoDB.Tests/IntegrationTests/TestConcurrency.cs similarity index 96% rename from MongoDB.Net-Tests/TestConcurrency.cs rename to source/MongoDB.Tests/IntegrationTests/TestConcurrency.cs index f18f9fcd..dce37946 100644 --- a/MongoDB.Net-Tests/TestConcurrency.cs +++ b/source/MongoDB.Tests/IntegrationTests/TestConcurrency.cs @@ -1,10 +1,9 @@ -using System; -using System.Collections.Generic; -using System.Threading; - -using NUnit.Framework; - -namespace MongoDB.Driver +using System; +using System.Collections.Generic; +using System.Threading; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests { [TestFixture()] @@ -21,8 +20,8 @@ public override string TestCollections { } } - public override void OnInit (){ - Collection col = (Collection)DB["threadsmallreads"]; + public override void OnInit (){ + var col = (IMongoCollection)DB["threadsmallreads"]; for(int j = 0; j < 4; j++){ col.Insert(new Document(){{"x", 4},{"j", j}}); } @@ -32,8 +31,8 @@ public override void OnInit (){ //[Test] public void TestMultiThreadedWrites (){ Mongo db = new Mongo(); - db.Connect(); - + db.Connect(); + IMongoCollection col = DB["threadinserts"]; List identifiers = new List{"A", "B", "C", "D"}; @@ -89,8 +88,8 @@ public void TestMultiThreadedReads(){ [Test] public void TestMultiThreadedReadsAndWrites(){ Mongo db = new Mongo(); - db.Connect(); - + db.Connect(); + IMongoCollection col = DB["threadreadinserts"]; List identifiers = new List{"A", "B", "C", "D"}; @@ -146,8 +145,8 @@ protected void RunAndWait(List threads){ public class Inserter{ public int Iterations{get; set;} public int Count{get;set;} - public String Identifier{get; set;} - public IMongoCollection Collection{get; set;} + public String Identifier{get; set;} + public IMongoCollection Collection 
{ get; set; } public void DoInserts(){ for(int x = 0; x < this.Iterations; x++){ @@ -164,13 +163,14 @@ public void DoInserts(){ public class Reader{ public int Iterations{get; set;} - public int Count{get;set;} - public IMongoCollection Collection{get; set;} + public int Count{get;set;} + public IMongoCollection Collection { get; set; } public void DoReads(){ for(int x = 0; x < this.Iterations; x++){ - try{ - using(ICursor c = this.Collection.FindAll()){ + try{ + using(ICursor c = this.Collection.FindAll()) + { //Just read one and do nothing with the Document. foreach(Document d in c.Documents){ d["works"] = true; diff --git a/source/MongoDB.Tests/IntegrationTests/TestCursor.cs b/source/MongoDB.Tests/IntegrationTests/TestCursor.cs new file mode 100644 index 00000000..9aa7f993 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/TestCursor.cs @@ -0,0 +1,150 @@ +using System; +using System.Linq; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests +{ + [TestFixture] + public class TestCursor : MongoTestBase + { + public override string TestCollections + { + get { return "sorts,hintindex,smallreads,reads"; } + } + + public override void OnInit() + { + //smallreads + var smallreads = DB["smallreads"]; + for(var j = 1; j < 5; j++) + smallreads.Insert(new Document {{"x", 4}, {"j", j}}); + smallreads.Insert(new Document {{"x", 4}, {"j", 5}, {"n", 1}}); + + var reads = DB["reads"]; + for(var j = 1; j < 10000; j++) + reads.Insert(new Document {{"x", 4}, {"h", "hi"}, {"j", j}}); + } + + [Test] + public void TestCanLimit() + { + var c = DB["reads"].FindAll().Limit(5); + + Assert.IsNotNull(c, "Cursor shouldn't be null"); + var reads = c.Documents.Count(); + Assert.IsTrue(reads > 0, "No documents were returned."); + Assert.AreEqual(5, reads); + } + + [Test] + public void TestCanReadAndKillCursor() + { + var c = (Cursor)DB["reads"].FindAll(); + + Assert.IsNotNull(c, "Cursor shouldn't be null"); + c.Documents.Any(); + c.Dispose(); + Assert.AreEqual(0, c.Id); + } + + [Test] + public void TestCanReadMore() + { + var c = (Cursor)DB["reads"].FindAll(); + + Assert.IsNotNull(c, "Cursor shouldn't be null"); + var reads = 0; + var idchanges = 0; + long id = 0; + foreach(var doc in c.Documents) + { + reads++; + if(c.Id != id) + { + idchanges++; + id = c.Id; + } + } + Assert.IsTrue(reads > 0, "No documents were returned."); + Assert.IsTrue(idchanges > 0, String.Format("ReadMore message never sent. 
{0} changes seen", idchanges)); + Assert.AreEqual(9999, reads, "Not all documents returned."); + Console.Out.Write(String.Format("{0} records read", reads)); + } + + [Test] + public void TestCanReuseCursor() + { + var c = (Cursor)DB["reads"].FindAll(); + + Assert.IsNotNull(c, "Cursor shouldn't be null"); + + var firstCount = c.Documents.Count(); + var secondCount = c.Documents.Count(); + + Assert.AreEqual(firstCount,secondCount); + } + + [Test] + public void TestCanReadSmall() + { + var c = DB["smallreads"].FindAll(); + + Assert.IsNotNull(c, "Cursor shouldn't be null"); + var reads = c.Documents.Count(); + Assert.IsTrue(reads > 0, "No documents were returned."); + Assert.AreEqual(5, reads, "More than 5 documents in the small reads dataset"); + } + + [Test] + public void TestExplain() + { + var exp = DB["reads"].FindAll().Limit(5).Skip(5).Sort("x").Explain(); + Assert.IsTrue(exp.ContainsKey("cursor")); + Assert.IsTrue(exp.ContainsKey("n")); + Assert.IsTrue(exp.ContainsKey("nscanned")); + } + + [Test] + public void TestHint() + { + var reads = DB["reads"]; + var hint = new Document().Add("x", IndexOrder.Ascending); + + var exp = reads.FindAll().Hint(hint).Explain(); + Assert.IsTrue(exp.ContainsKey("$err"), "No error found"); + + reads.Metadata.CreateIndex("hintindex", hint, false); + exp = reads.FindAll().Hint(hint).Explain(); + + Assert.IsTrue(exp.ContainsKey("cursor")); + Assert.IsTrue(exp.ContainsKey("n")); + Assert.IsTrue(exp.ContainsKey("nscanned")); + } + + [Test] + public void TestSort() + { + var sorts = DB["sorts"]; + var randoms = new[] {4, 6, 8, 9, 1, 3, 2, 5, 7, 0}; + foreach(var x in randoms) + sorts.Insert(new Document().Add("x", randoms[x])); + Assert.AreEqual(randoms.Length, sorts.Count()); + + var exp = 0; + foreach(var doc in sorts.FindAll().Sort("x", IndexOrder.Ascending).Documents) + { + Assert.AreEqual(exp, Convert.ToInt32(doc["x"])); + exp++; + } + Assert.AreEqual(randoms.Length, exp); + + exp = 9; + foreach(var doc in sorts.FindAll().Sort("x", IndexOrder.Descending).Documents) + { + Assert.AreEqual(exp, Convert.ToInt32(doc["x"])); + exp--; + } + Assert.AreEqual(-1, exp); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/TestDatabase.cs b/source/MongoDB.Tests/IntegrationTests/TestDatabase.cs new file mode 100644 index 00000000..68d5dd1b --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/TestDatabase.cs @@ -0,0 +1,138 @@ +using System; +using System.Collections.Generic; +using NUnit.Framework; +using System.Configuration; + +namespace MongoDB.IntegrationTests +{ + [TestFixture] + public class TestDatabase : MongoTestBase + { + public override string TestCollections{ + get { return "refs,noerror,errcol,preverror"; } + } + + [Test] + public void TestEvalNoScope(){ + var result = DB.Eval("function(){return 3;}"); + Assert.AreEqual(3, result["retval"]); + } + + [Test] + public void TestEvalWithScope(){ + var val = 3; + var scope = new Document().Add("x", val); + var result = DB.Eval("function(){return x;}", scope); + Assert.AreEqual(val, result["retval"]); + } + + [Test] + public void TestEvalWithScopeAsFunctionParameters(){ + var x = 3; + var y = 4; + var func = "adder = function(a, b){return a + b;}; return adder(x,y)"; + var scope = new Document().Add("x", x).Add("y", y); + var result = DB.Eval(func, scope); + Console.Out.WriteLine(result.ToString()); + Assert.AreEqual(x + y, result["retval"]); + } + + [Test] + public void TestFollowNonReference(){ + var id = new Oid("BAD067c30a57000000008ecb"); + var rf = new DBRef("refs", 
id); + + var target = DB.FollowReference(rf); + Assert.IsNull(target, "FollowReference returned wasn't null"); + } + + [Test] + public void TestFollowReference(){ + var refs = DB["refs"]; + var id = new Oid("4a7067c30a57000000008ecb"); + var msg = "this has an oid key"; + var doc = new Document {{"_id", id}, {"msg", msg}}; + refs.Insert(doc); + + var rf = new DBRef("refs", id); + + var target = DB.FollowReference(rf); + Assert.IsNotNull(target, "FollowReference returned null"); + Assert.IsTrue(target.ContainsKey("msg")); + Assert.AreEqual(msg, target["msg"]); + } + + [Test] + public void TestGetCollectionNames(){ + var names = DB.GetCollectionNames(); + Assert.IsNotNull(names, "No collection names returned"); + Assert.IsTrue(names.Count > 0); + Assert.IsTrue(names.Contains("tests.inserts")); + } + + [Test] + public void TestGetLastError(){ + var errcol = DB["errcol"]; + errcol.Metadata.CreateIndex(new Document {{"x", IndexOrder.Ascending}}, true); + var dup = new Document {{"x", 1}, {"y", 2}}; + errcol.Insert(dup); + var error = DB.GetLastError(); + Assert.AreEqual(null, error["err"]); + + errcol.Insert(dup); + error = DB.GetLastError(); + + Assert.IsFalse(null == error["err"]); + } + + [Test] + public void TestGetLastErrorNoError(){ + DB["noerror"].Insert(new Document {{"a", 1}, {"b", 2}}); + var error = DB.GetLastError(); + Assert.AreEqual(null, error["err"]); + } + + [Test] + public void TestGetPrevError(){ + var col = DB["preverror"]; + col.Metadata.CreateIndex(new Document {{"x", IndexOrder.Ascending}}, true); + var docs = new List(); + for(var x = 0; x < 10; x++) + docs.Add(new Document {{"x", x}, {"y", 2}}); + docs.Add(new Document {{"x", 1}, {"y", 4}}); //the dupe + DB.ResetError(); + Assert.AreEqual(null, DB.GetLastError()["err"]); + + col.Insert(docs); + var error = DB.GetLastError(); + + Assert.IsFalse(null == error["err"]); + } + + [Test] + public void TestReferenceNonOid(){ + var refs = DB["refs"]; + + var doc = new Document().Add("_id", 123).Add("msg", "this has a non oid key"); + refs.Insert(doc); + + var rf = new DBRef("refs", 123); + + var recv = DB.FollowReference(rf); + + Assert.IsNotNull(recv); + Assert.IsTrue(recv.ContainsKey("msg")); + Assert.AreEqual(recv["_id"], (long)123); + } + + [Test] + public void TestCanCreateFromConnectionString(){ + var builder = new MongoConnectionStringBuilder(ConfigurationManager.AppSettings["tests"]) {Database = "tests"}; + /* + using(var database = new MongoDatabase(builder.ToString())) + { + + }*/ + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/IntegrationTests/TestDatabaseJavascript.cs b/source/MongoDB.Tests/IntegrationTests/TestDatabaseJavascript.cs new file mode 100644 index 00000000..9555a14c --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/TestDatabaseJavascript.cs @@ -0,0 +1,209 @@ +using System; +using System.Linq; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests +{ + [TestFixture] + public class TestDatabaseJavascript : MongoTestBase + { + private DatabaseJavascript _javascript; + + public override string TestCollections + { + get { return "jsreads"; } + } + + public override void OnInit() + { + DB["system.js"].Delete(new Document()); + _javascript = DB.Javascript; + + var jsreads = DB["jsreads"]; + for(var j = 1; j < 10; j++) + jsreads.Insert(new Document {{"j", j}}); + } + + protected void AddFunction(string name) + { + var func = new Code("function(x,y){return x + y;}"); + DB["system.js"].Insert(new Document().Add("_id", name).Add("value", func)); + } + + [Test] + public void 
TestCanAddAFunctionDoc() + { + const string name = "fadddoc"; + var func = new Code("function(x, y){return x + y;}"); + var doc = new Document().Add("_id", name).Add("value", func); + _javascript.Add(doc); + Assert.IsNotNull(_javascript[name]); + } + + [Test] + public void TestCanAddAFunctionStrCode() + { + const string name = "faddsc"; + var func = new Code("function(x, y){return x + y;}"); + _javascript.Add(name, func); + Assert.IsNotNull(_javascript[name]); + } + + [Test] + public void TestCanAddAFunctionStrStr() + { + const string name = "faddss"; + var func = "function(x, y){return x + y;}"; + _javascript.Add(name, func); + Assert.IsNotNull(_javascript[name]); + } + + [Test] + public void TestCanAddFunctionByAssignment() + { + const string name = "fassignadd"; + var func = new Code("function(x,y){return x + y;}"); + var doc = new Document().Add("_id", name).Add("value", func); + _javascript[name] = doc; + Assert.IsNotNull(_javascript[name]); + } + + [Test] + public void TestCanGetAFunction() + { + const string name = "fget"; + AddFunction(name); + Assert.IsNotNull(_javascript[name]); + Assert.IsNotNull(_javascript.GetFunction(name)); + } + + [Test] + public void TestCanGetDatabaseJSObject() + { + Assert.IsNotNull(DB.Javascript); + } + + [Test] + public void TestCanListFunctions() + { + const string name = "flist"; + AddFunction(name); + var list = _javascript.GetFunctionNames(); + Assert.IsTrue(list.Count > 0); + + var found = false; + foreach(var l in list) + if(l == name) + found = true; + Assert.IsTrue(found, "Didn't find the function that was inserted."); + } + + [Test] + public void TestCannotAddAFunctionTwice() + { + const string name = "faddtwice"; + var func = new Code("function(x,y){return x + y;}"); + _javascript.Add(name, func); + var thrown = false; + try + { + _javascript.Add(name, func); + } + catch(ArgumentException) + { + thrown = true; + } + Assert.IsTrue(thrown, "Shouldn't be able to add a function twice"); + } + + [Test] + public void TestClear() + { + AddFunction("clear"); + Assert.IsTrue(_javascript.Count > 0); + _javascript.Clear(); + Assert.IsTrue(_javascript.Count == 0); + } + + [Test] + public void TestContains() + { + const string name = "fcontains"; + AddFunction(name); + Assert.IsTrue(_javascript.Contains(name)); + Assert.IsFalse(_javascript.Contains("none")); + Assert.IsTrue(_javascript.Contains(new Document().Add("_id", name).Add("value", new Code("dfs")))); + } + + [Test] + public void TestCopyTo() + { + const int count = 5; + var functions = new Document[count]; + var funcCode = new Code("function(x,y){return x+y;}"); + + for(var i = 0; i < count; i++) + { + var name = string.Format("_{0}fcopyTo", i); + _javascript[name] = new Document("_id", name).Add("value", funcCode); + } + + _javascript.CopyTo(functions, 1); + + Assert.IsNull(functions[0]); + Assert.IsNotNull(functions[1]); + Assert.IsNotNull(functions[4]); + + Assert.AreEqual("_1fcopyTo", functions[1]["_id"]); + Assert.IsTrue(((string)functions[1]["_id"]).StartsWith("_1")); //as long as no other _ named functions get in. 
+ } + + [Test] + public void TestExec() + { + _javascript.Add("lt4", new Code("function(doc){return doc.j < 4;}")); + var cnt = DB["reads"].Find("lt4(this)").Documents.Count(); + Assert.AreEqual(3, cnt); + } + + [Test] + public void TestExecWithScope() + { + _javascript.Add("lt", new Code("function(doc){ return doc.j < limit;}")); + var scope = new Document().Add("limit", 5); + var query = new Document().Add("$where", new CodeWScope("lt(this)", scope)); + var cnt = DB["jsreads"].Find(query).Documents.Count(); + Assert.AreEqual(4, cnt); + } + + [Test] + public void TestForEach() + { + var name = "foreach"; + AddFunction(name); + var found = _javascript.Any(doc => name.Equals(doc["_id"])); + Assert.IsTrue(found, "Added function wasn't found during foreach"); + } + + [Test] + public void TestRemoveByDoc() + { + const string name = "fremoved"; + var func = new Document().Add("_id", name); + AddFunction(name); + Assert.IsTrue(_javascript.Contains(name)); + _javascript.Remove(func); + Assert.IsFalse(_javascript.Contains(name)); + } + + [Test] + public void TestRemoveByName() + { + const string name = "fremoven"; + AddFunction(name); + Assert.IsTrue(_javascript.Contains(name)); + _javascript.Remove(name); + Assert.IsFalse(_javascript.Contains(name)); + } + } +} \ No newline at end of file diff --git a/MongoDB.Net-Tests/TestDatabaseMetaData.cs b/source/MongoDB.Tests/IntegrationTests/TestDatabaseMetaData.cs similarity index 76% rename from MongoDB.Net-Tests/TestDatabaseMetaData.cs rename to source/MongoDB.Tests/IntegrationTests/TestDatabaseMetaData.cs index 853f89b7..c977ccdf 100644 --- a/MongoDB.Net-Tests/TestDatabaseMetaData.cs +++ b/source/MongoDB.Tests/IntegrationTests/TestDatabaseMetaData.cs @@ -1,9 +1,8 @@ -using System; -using System.Collections.Generic; - -using NUnit.Framework; - -namespace MongoDB.Driver +using System; +using System.Collections.Generic; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests { [TestFixture] public class TestDatabaseMetaData : MongoTestBase @@ -15,13 +14,13 @@ public override string TestCollections { } public override void OnInit () { - //Add any new collections ones to work on. - DB["$cmd"].FindOne(new Document().Append("create","todrop")); + //Add any new collections ones to work on. 
+ DB["$cmd"].FindOne(new Document().Add("create", "todrop")); } [Test] public void TestCreateCollectionNoOptions(){ - DB.MetaData.CreateCollection("creatednoopts"); + DB.Metadata.CreateCollection("creatednoopts"); List names = DB.GetCollectionNames(); Assert.IsTrue(names.Contains("tests.creatednoopts")); @@ -29,9 +28,9 @@ public void TestCreateCollectionNoOptions(){ } [Test] - public void TestCreateCollectionWithOptions(){ - Document options = new Document().Append("capped",true).Append("size",10000); - DB.MetaData.CreateCollection("createdcapped",options); + public void TestCreateCollectionWithOptions(){ + Document options = new Document().Add("capped", true).Add("size", 10000); + DB.Metadata.CreateCollection("createdcapped",options); List names = DB.GetCollectionNames(); Assert.IsTrue(names.Contains("tests.createdcapped")); @@ -39,9 +38,9 @@ public void TestCreateCollectionWithOptions(){ } [Test] - public void TestCreateCollectionWithInvalidOptions(){ - Document options = new Document().Append("invalidoption",true); - DB.MetaData.CreateCollection("createdinvalid",options); + public void TestCreateCollectionWithInvalidOptions(){ + Document options = new Document().Add("invalidoption", true); + DB.Metadata.CreateCollection("createdinvalid",options); List names = DB.GetCollectionNames(); Assert.IsTrue(names.Contains("tests.createdinvalid")); @@ -50,7 +49,7 @@ public void TestCreateCollectionWithInvalidOptions(){ [Test] public void TestDropCollection(){ - bool dropped = DB.MetaData.DropCollection("todrop"); + bool dropped = DB.Metadata.DropCollection("todrop"); Assert.IsTrue(dropped,"Dropped was false"); @@ -63,7 +62,7 @@ public void TestDropCollection(){ public void TestDropInvalidCollection(){ bool thrown = false; try{ - DB.MetaData.DropCollection("todrop_notexists"); + DB.Metadata.DropCollection("todrop_notexists"); }catch(MongoCommandException){ thrown = true; } diff --git a/source/MongoDB.Tests/IntegrationTests/TestMapReduce.cs b/source/MongoDB.Tests/IntegrationTests/TestMapReduce.cs new file mode 100644 index 00000000..139a5937 --- /dev/null +++ b/source/MongoDB.Tests/IntegrationTests/TestMapReduce.cs @@ -0,0 +1,126 @@ +using System; +using NUnit.Framework; + +namespace MongoDB.IntegrationTests +{ + [TestFixture] + public class TestMapReduce : MongoTestBase + { + private IMongoCollection _collection; + + private const string MapFunc = "function(){\n" + + " this.tags.forEach(\n" + + " function(z){\n" + + " emit( z , { count : 1 } );\n" + + " });\n" + + "};"; + + private const string ReduceFunc = "function( key , values ){\n" + + " var total = 0;\n" + + " for ( var i=0; i + + + Debug + AnyCPU + 9.0.30729 + 2.0 + {C8BC95AB-25C6-4133-BC9F-8B6BB782CA02} + Library + MongoDB.Tests + + + 3.5 + + + false + v3.5 + MongoDB.Driver.Tests + C:\Documents and Settings\scorder\Application Data\ICSharpCode/SharpDevelop3.0\Settings.SourceAnalysis + False + False + false + publish\ + true + Disk + false + Foreground + 7 + Days + false + false + true + 0 + 1.0.0.%2a + false + true + v3.5 + MongoDB + C:\Documents and Settings\scorder\Application Data\ICSharpCode/SharpDevelop3.0\Settings.SourceAnalysis + False + False + false + true + ..\..\StrongName.snk + + + true + full + false + bin\Debug + TRACE;DEBUG + prompt + 4 + 618, 1718 + AllRules.ruleset + + + none + false + bin\Release + prompt + 4 + AllRules.ruleset + + + False + + + False + Auto + 4194304 + AnyCPU + 4096 + + + + False + ..\..\redist\nunit.framework.dll + + + + + 3.5 + + + + 3.5 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
diff --git a/MongoDB.Net-Tests/MongoTestBase.cs b/source/MongoDB.Tests/MongoTestBase.cs
old mode 100755
new mode 100644
similarity index 75%
rename from MongoDB.Net-Tests/MongoTestBase.cs
rename to source/MongoDB.Tests/MongoTestBase.cs
index f0bdf3e2..209dd834
--- a/MongoDB.Net-Tests/MongoTestBase.cs
+++ b/source/MongoDB.Tests/MongoTestBase.cs
@@ -1,26 +1,25 @@
 using System;
 using System.Configuration;
-
 using NUnit.Framework;
+using MongoDB.Configuration;
 
-namespace MongoDB.Driver
+namespace MongoDB
 {
-
 public abstract class MongoTestBase
 {
 public Mongo Mongo{get;set;}
- public Database DB{
+
+ public IMongoDatabase DB{
 get{
 return this.Mongo["tests"];
 }
 }
-
+
 ///
 /// Comma separated list of collections to clean at startup.
 ///
 public abstract string TestCollections{get;}
-
 ///
 /// Override to add custom initialization code.
 ///
@@ -31,15 +30,12 @@ public virtual void OnInit(){}
 ///
 public virtual void OnDispose(){}
 
-
 ///
 /// Sets up the test environment. You can either override this OnInit to add custom initialization.
 ///
 [TestFixtureSetUp]
 public virtual void Init(){
- string connstr = ConfigurationManager.AppSettings["tests"];
- if(String.IsNullOrEmpty(connstr)) throw new ArgumentNullException("Connection string not found.");
- this.Mongo = new Mongo(connstr);
+ this.Mongo = new Mongo(GetConfiguration().BuildConfiguration());
 this.Mongo.Connect();
 CleanDB();
 OnInit();
@@ -58,5 +54,12 @@ protected void CleanDB(){
 //Console.WriteLine("Dropping " + col);
 }
 }
+
+ protected virtual MongoConfigurationBuilder GetConfiguration()
+ {
+ var builder = new MongoConfigurationBuilder();
+ builder.ReadConnectionStringFromAppSettings("tests");
+ return builder;
+ }
 }
}
\ No newline at end of file
diff --git a/MongoDB.Net-Tests/test-data/tests.binary.txt b/source/MongoDB.Tests/Test-Data/tests.binary.txt
similarity index 100%
rename from MongoDB.Net-Tests/test-data/tests.binary.txt
rename to source/MongoDB.Tests/Test-Data/tests.binary.txt
diff --git a/MongoDB.Net-Tests/test-data/tests.charreads.txt b/source/MongoDB.Tests/Test-Data/tests.charreads.txt
similarity index 100%
rename from MongoDB.Net-Tests/test-data/tests.charreads.txt
rename to source/MongoDB.Tests/Test-Data/tests.charreads.txt
diff --git a/MongoDB.Net-Tests/test-data/tests.reads.txt b/source/MongoDB.Tests/Test-Data/tests.reads.txt
similarity index 100%
rename from MongoDB.Net-Tests/test-data/tests.reads.txt
rename to source/MongoDB.Tests/Test-Data/tests.reads.txt
diff --git a/MongoDB.Net-Tests/test-data/tests.smallreads.txt b/source/MongoDB.Tests/Test-Data/tests.smallreads.txt
similarity index 100%
rename from MongoDB.Net-Tests/test-data/tests.smallreads.txt
rename to source/MongoDB.Tests/Test-Data/tests.smallreads.txt
diff --git a/source/MongoDB.Tests/UnitTests/Bson/BsonTestBase.cs b/source/MongoDB.Tests/UnitTests/Bson/BsonTestBase.cs
new file mode 100644
index 00000000..f2ad495e
--- /dev/null
+++ b/source/MongoDB.Tests/UnitTests/Bson/BsonTestBase.cs
@@ -0,0 +1,62 @@
+using System;
+using System.IO;
+using MongoDB.Bson;
+
+namespace 
MongoDB.UnitTests.Bson +{ + public abstract class BsonTestBase + { + protected string Serialize(Document document) + { + return Serialize(document, new BsonWriterSettings()); + } + + protected string Serialize(Document document, BsonWriterSettings settings) + { + using(var mem = new MemoryStream()) + { + var writer = new BsonWriter(mem, settings); + writer.WriteObject(document); + writer.Flush(); + return Convert.ToBase64String(mem.ToArray()); + } + } + + protected Document Deserialize(string base64){ + return Deserialize(base64, new BsonReaderSettings()); + } + + protected Document Deserialize(string base64, BsonReaderSettings settings) + { + using(var mem = new MemoryStream(Convert.FromBase64String(base64))) + { + var reader = new BsonReader(mem, settings); + return (Document)reader.ReadObject(); + } + } + + + protected byte[] HexToBytes(string hex) + { + //TODO externalize somewhere. + if (hex.Length % 2 == 1) + { + Console.WriteLine("uneven number of hex pairs."); + hex = "0" + hex; + } + var numberChars = hex.Length; + var bytes = new byte[numberChars / 2]; + for (var i = 0; i < numberChars; i += 2) + try + { + bytes[i / 2] = Convert.ToByte(hex.Substring(i, 2), 16); + } + catch + { + //failed to convert these 2 chars, they may contain illegal charracters + bytes[i / 2] = 0; + } + return bytes; + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Bson/TestBsonBinary.cs b/source/MongoDB.Tests/UnitTests/Bson/TestBsonBinary.cs new file mode 100644 index 00000000..515fa9df --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Bson/TestBsonBinary.cs @@ -0,0 +1,60 @@ +using System; +using System.IO; +using MongoDB.Bson; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Bson +{ + [TestFixture] + public class TestBsonBinary + { + protected static byte[] DecodeHex(string val){ + var numberChars = val.Length; + + var bytes = new byte[numberChars/2]; + for(var i = 0; i < numberChars; i += 2) + try{ + bytes[i/2] = Convert.ToByte(val.Substring(i, 2), 16); + } + catch{ + //failed to convert these 2 chars, they may contain illegal charracters + bytes[i/2] = 0; + } + return bytes; + } + + [Test] + public void TestBinaryRead(){ + const string hex = "28000000075f6964004b1971811d8b0f00c0000000056461746100070000000203000000e188b400"; + + var data = DecodeHex(hex); + var inmem = new MemoryStream(data); + var inreader = new BsonReader(inmem,new BsonDocumentBuilder()); + var indoc = inreader.Read(); + + var outmem = new MemoryStream(); + var outwriter = new BsonWriter(outmem, new BsonDocumentDescriptor()); + outwriter.WriteObject(indoc); + var outdata = outmem.ToArray(); + var outhex = BitConverter.ToString(outdata); + outhex = outhex.Replace("-", ""); + + Assert.AreEqual(hex, outhex.ToLower()); + } + + [Test] + public void TestRoundTrip(){ + var idoc = new Document{{"b", new Binary(new[]{(byte)1, (byte)2})}}; + + var stream = new MemoryStream(); + var writer = new BsonWriter(stream, new BsonDocumentDescriptor()); + writer.WriteObject(idoc); + + stream.Seek(0, SeekOrigin.Begin); + var reader = new BsonReader(stream,new BsonDocumentBuilder()); + var odoc = reader.Read(); + + Assert.AreEqual(idoc.ToString(), odoc.ToString()); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Bson/TestBsonReader.cs b/source/MongoDB.Tests/UnitTests/Bson/TestBsonReader.cs new file mode 100644 index 00000000..92a6d83e --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Bson/TestBsonReader.cs @@ -0,0 +1,287 @@ +using System; +using System.IO; +using System.Text; +using 
MongoDB.Bson; +using MongoDB.Util; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Bson +{ + [TestFixture] + public class TestBsonReader : BsonTestBase + { + private char pound = '\u00a3'; + private char euro = '\u20ac'; + + private string WriteAndReadString(string val){ + var buf = Encoding.UTF8.GetBytes(val + '\0'); + + var ms = new MemoryStream(buf); + var reader = new BsonReader(ms, new BsonDocumentBuilder()); + return reader.ReadString(); + } + + private string WriteAndReadLenString(string val){ + var ms = new MemoryStream(); + var bs = new BsonWriter(ms, new BsonDocumentDescriptor()); + var w = new BinaryWriter(ms); + var byteCount = bs.CalculateSize(val, false); + w.Write(byteCount); + bs.Write(val, false); + ms.Seek(0, SeekOrigin.Begin); + var reader = new BsonReader(ms, new BsonDocumentBuilder()); + return reader.ReadLengthString(); + } + + [Test] + public void TestReadDocWithDocs(){ + // Document doc = new Document().Append("a", new Document().Append("b", new Document().Append("c",new Document()))); + // Console.WriteLine(ConvertDocToHex(doc)); + var buf = HexToBytes("1D000000036100150000000362000D0000000363000500000000000000"); + var ms = new MemoryStream(buf); + var reader = new BsonReader(ms, new BsonDocumentBuilder()); + + var doc = (Document)reader.ReadObject(); + Assert.IsNotNull(doc, "Document was null"); + Assert.AreEqual(buf.Length, reader.Position); + Assert.IsTrue(doc.ContainsKey("a")); + } + + [Test] + public void TestReadEmptyDocument(){ + var buf = HexToBytes("0500000000"); + var ms = new MemoryStream(buf); + var reader = new BsonReader(ms, new BsonDocumentBuilder()); + + var doc = (Document)reader.ReadObject(); + + Assert.IsNotNull(doc); + } + + [Test] + public void TestReadLenString(){ + const string expected = "test"; + Assert.AreEqual(expected, WriteAndReadLenString(expected)); + } + + [Test] + public void TestReadLenStringLong(){ + var sb = new StringBuilder(); + sb.Append('t', 150); + var expected = sb.ToString(); + Assert.AreEqual(expected, WriteAndReadLenString(expected)); + } + + [Test] + public void TestReadLenStringShortTripleByte(){ + var sb = new StringBuilder(); + //sb.Append('1',127); //first char of euro at the end of the boundry. + //sb.Append(euro, 5); + //sb.Append('1',128); + sb.Append(euro); + + var expected = sb.ToString(); + Assert.AreEqual(expected, WriteAndReadLenString(expected)); + } + + [Test] + public void TestReadLenStringTripleByteCharBufferBoundry0(){ + var sb = new StringBuilder(); + sb.Append('1', 127); //first char of euro at the end of the boundry. + sb.Append(euro, 5); + sb.Append('1', 128); + sb.Append(euro); + + var expected = sb.ToString(); + Assert.AreEqual(expected, WriteAndReadLenString(expected)); + } + + [Test] + public void TestReadLenStringTripleByteCharBufferBoundry1(){ + var sb = new StringBuilder(); + sb.Append('1', 126); + sb.Append(euro, 5); //middle char of euro at the end of the boundry. + sb.Append('1', 128); + sb.Append(euro); + + var expected = sb.ToString(); + Assert.AreEqual(expected, WriteAndReadLenString(expected)); + } + + [Test] + public void TestReadLenStringTripleByteCharBufferBoundry2(){ + var sb = new StringBuilder(); + sb.Append('1', 125); + sb.Append(euro, 5); //last char of the eruo at the end of the boundry. 
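// The euro sign '\u20ac' is a three-byte UTF-8 sequence (0xE2 0x82 0xAC); the '1' padding
// counts used in these tests (125-127) are presumably chosen so those bytes straddle the
// reader's internal 128-byte buffer boundary (buffer size inferred from the tests, not confirmed).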
+ sb.Append('1', 128); + sb.Append(euro); + + var expected = sb.ToString(); + Assert.AreEqual(expected, WriteAndReadLenString(expected)); + } + + [Test] + public void TestReadLenStringTripleByteCharOne(){ + var sb = new StringBuilder(); + sb.Append(euro, 1); //Just one triple byte char in the string. + + var expected = sb.ToString(); + Assert.AreEqual(expected, WriteAndReadLenString(expected)); + } + + [Test] + public void TestReadLenStringValue(){ + const string expected = "test"; + + Assert.AreEqual(expected, WriteAndReadLenString(expected)); + } + + [Test] + public void TestReadMultiElementDocument(){ + var buf = HexToBytes("2D000000075F6964004A753AD8FAC16EA58B290351016100000000000000F03F02620005000000746573740000"); + var ms = new MemoryStream(buf); + var reader = new BsonReader(ms, new BsonDocumentBuilder()); + + var doc = (Document)reader.ReadObject(); + + Assert.IsNotNull(doc, "Document was null"); + Assert.IsTrue(doc.ContainsKey("_id")); + Assert.IsTrue(doc.ContainsKey("a")); + Assert.IsTrue(doc.ContainsKey("b")); + Assert.AreEqual("4a753ad8fac16ea58b290351", (doc["_id"]).ToString()); + Assert.AreEqual(1, Convert.ToInt32(doc["a"])); + Assert.AreEqual("test", doc["b"]); + } + + [Test] + public void TestReadSimpleDocument(){ + var buf = HexToBytes("1400000002746573740005000000746573740000"); + var ms = new MemoryStream(buf); + var reader = new BsonReader(ms, new BsonDocumentBuilder()); + + var doc = reader.Read(); + + Assert.IsNotNull(doc, "Document was null"); + Assert.IsTrue(doc.ContainsKey("test")); + Assert.AreEqual("test", doc["test"]); + } + + [Test] + public void TestReadString(){ + var buf = HexToBytes("7465737400"); + var ms = new MemoryStream(buf); + var reader = new BsonReader(ms, new BsonDocumentBuilder()); + + var s = reader.ReadString(); + Assert.AreEqual("test", s); + Assert.AreEqual(4, Encoding.UTF8.GetByteCount(s)); + } + + [Test] + public void TestReadBigDocument(){ + MemoryStream ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + + Document expected = new Document(); + expected.Append("str", "test") + .Append("int", 45) + .Append("long", (long)46) + .Append("num", 4.5) + .Append("date",DateTime.Today) + .Append("_id", new OidGenerator().Generate()) + .Append("code", new Code("return 1;")) + .Append("subdoc", new Document().Append("a",1).Append("b",2)) + .Append("array", new String[]{"a","b","c","d"}) + .Append("codewscope", new CodeWScope("return 2;", new Document().Append("c",1))) + .Append("binary", new Binary(new byte[]{0,1,2,3})) + .Append("regex", new MongoRegex("[A-Z]")) + .Append("minkey", MongoMinKey.Value) + .Append("maxkey", MongoMaxKey.Value) + .Append("symbol", new MongoSymbol("symbol")) + ; + writer.WriteObject(expected); + writer.Flush(); + ms.Seek(0,SeekOrigin.Begin); + + BsonReader reader = new BsonReader(ms, new BsonDocumentBuilder()); + Document doc = reader.Read(); + + Assert.IsNotNull(doc); + } + + [Test] + public void TestReadStringBreakDblByteCharOverBuffer(){ + var sb = new StringBuilder(); + sb.Append('1', 127); + sb.Append(pound); //will break the pound symbol over the buffer boundry. + //sb.Append("1"); + + var expected = sb.ToString(); + Assert.AreEqual(expected, WriteAndReadString(expected)); + } + + [Test] + public void TestReadStringDblByteCharOnEndOfBufferBoundry(){ + var sb = new StringBuilder(); + sb.Append(pound, 66); //puts a pound symbol at the end of the buffer boundry but not broken. 
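// The pound sign '\u00a3' is a two-byte UTF-8 sequence (0xC2 0xA3); this test presumably
// places that sequence so it ends on the reader's buffer boundary without being split.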
+ var expected = sb.ToString(); + Assert.AreEqual(expected, WriteAndReadString(expected)); + } + + [Test] + public void TestReadStringLong(){ + var sb = new StringBuilder(); + sb.Append('t', 256); + var expected = sb.ToString(); + Assert.AreEqual(expected, WriteAndReadString(expected)); + } + + [Test] + public void TestReadStringTripleByteCharBufferBoundry(){ + var sb = new StringBuilder(); + sb.Append("12"); + sb.Append(euro, 66); //will break the euro symbol over the buffer boundry. + + var expected = sb.ToString(); + + Assert.AreEqual(expected, WriteAndReadString(expected)); + } + + [Test] + public void TestReadStringWithUkPound(){ + const string expected = "1234£56"; + Assert.AreEqual(expected, WriteAndReadString(expected)); + } + + [Test] + public void TestReadUtcTimeByDefault(){ + var document = Deserialize("EwAAAAl0aW1lAADJU+klAQAAAA=="); + + var dateTime = new DateTime(2010, 1, 1, 10, 0, 0, DateTimeKind.Utc); + + Assert.AreEqual(dateTime, document["time"]); + } + + [Test] + public void TestReadUtcTimeToLocalTime(){ + var settings = new BsonReaderSettings {ReadLocalTime = true}; + + var document = Deserialize("EwAAAAl0aW1lAADJU+klAQAAAA==", settings); + + var localtzoffset =TimeZoneInfo.Local.BaseUtcOffset.Hours - 1; //gmt offset the local date was saved in along with the local offset. + + var dateTime = new DateTime(2010, 1, 1, 11, 0, 0, DateTimeKind.Local).AddHours(localtzoffset); + Assert.AreEqual(dateTime, document["time"]); + } + + [Test] + public void TestCanReadNagativeDates() + { + const string bson = "EwAAAAlkYXRlAIBFaoO2////AA=="; + + var document = Deserialize(bson); + + Assert.AreEqual(new DateTime(1960,1,1).ToUniversalTime(),document["date"]); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Bson/TestBsonWriter.cs b/source/MongoDB.Tests/UnitTests/Bson/TestBsonWriter.cs new file mode 100644 index 00000000..b32a6163 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Bson/TestBsonWriter.cs @@ -0,0 +1,258 @@ +using System; +using System.IO; +using System.Text; +using System.Text.RegularExpressions; +using MongoDB.Bson; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Bson +{ + [TestFixture] + public class TestBsonWriter : BsonTestBase + { + private char euro = '\u20ac'; + + private string WriteStringAndGetHex(string val) + { + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + writer.Write(val, false); + return BitConverter.ToString(ms.ToArray()); + } + + [Test] + public void TestCalculateSizeOfComplexDoc() + { + var doc = new Document(); + doc.Add("a", "a"); + doc.Add("b", 1); + var sub = new Document().Add("c_1", 1).Add("c_2", DateTime.Now); + doc.Add("c", sub); + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + + Assert.AreEqual(51, writer.CalculateSizeObject(doc)); + } + + [Test] + public void TestCalculateSizeOfEmptyDoc() + { + var doc = new Document(); + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + + Assert.AreEqual(5, writer.CalculateSizeObject(doc)); + } + + [Test] + public void TestCalculateSizeOfSimpleDoc() + { + var doc = new Document {{"a", "a"}, {"b", 1}}; + + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + //BsonDocument bdoc = BsonConvert.From(doc); + + Assert.AreEqual(21, writer.CalculateSizeObject(doc)); + } + + [Test] + public void TestLocalDateTimeIsWrittenAsUtcTime() + { + var localtzoffset = 
TimeZoneInfo.Local.BaseUtcOffset.Hours; + + var dateTime = new DateTime(2010, 1, 1, 11, 0, 0, DateTimeKind.Local); + var utcTime = new DateTime(2010, 1, 1, 11 - localtzoffset, 0, 0, DateTimeKind.Utc); + + var base64 = Serialize(new Document("time", dateTime)); + var expected = Serialize(new Document("time", utcTime)); + + Assert.AreEqual(expected, base64); + } + + [Test] + public void TestNullsDontThrowExceptions() + { + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + var doc = new Document().Add("n", null); + try + { + writer.WriteObject(doc); + } + catch(NullReferenceException) + { + Assert.Fail("Null Reference Exception was thrown on trying to serialize a null value"); + } + } + + [Test] + public void TestWriteArrayDoc() + { + const string expected = "2000000002300002000000610002310002000000620002320002000000630000"; + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + + var str = new[] {"a", "b", "c"}; + writer.WriteValue(BsonType.Array, str); + + var hexdump = BitConverter.ToString(ms.ToArray()); + hexdump = hexdump.Replace("-", ""); + Assert.AreEqual(expected, hexdump); + } + + [Test] + public void TestWriteDocument() + { + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + const string expected = "1400000002746573740005000000746573740000"; + var doc = new Document().Add("test", "test"); + + writer.WriteObject(doc); + + var hexdump = BitConverter.ToString(ms.ToArray()); + hexdump = hexdump.Replace("-", ""); + + Assert.AreEqual(expected, hexdump); + } + + [Test] + public void TestWriteMultibyteString() + { + var val = new StringBuilder().Append(euro, 3).ToString(); + var expected = BitConverter.ToString(Encoding.UTF8.GetBytes(val + '\0')); + Assert.AreEqual(expected, WriteStringAndGetHex(val)); + } + + [Test] + public void TestWriteMultibyteStringLong() + { + var val = new StringBuilder().Append("ww").Append(euro, 180).ToString(); + var expected = BitConverter.ToString(Encoding.UTF8.GetBytes(val + '\0')); + Assert.AreEqual(expected, WriteStringAndGetHex(val)); + } + + [Test] + public void TestWriteSingle() + { + var expected = "000000E0FFFFEF47"; + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + var val = Single.MaxValue; + + writer.WriteValue(BsonType.Number, val); + + var hexdump = BitConverter.ToString(ms.ToArray()); + hexdump = hexdump.Replace("-", ""); + Assert.AreEqual(expected, hexdump); + } + + [Test] + public void TestWriteString() + { + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + const string expected = "54-65-73-74-73-2E-69-6E-73-65-72-74-73-00"; + writer.Write("Tests.inserts", false); + + var hexdump = BitConverter.ToString(ms.ToArray()); + + Assert.AreEqual(expected, hexdump); + } + + [Test] + public void TestWriteSymbol() + { + var expected = "0700000073796D626F6C00"; + + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + MongoSymbol val = "symbol"; + Assert.IsTrue(String.IsInterned(val) != null); + writer.WriteValue(BsonType.Symbol, val); + var hexdump = BitConverter.ToString(ms.ToArray()).Replace("-", ""); + + Assert.AreEqual(expected, hexdump); + } + + [Test] + public void TestWriteUtcTimeByDefault() + { + var dateTime = new DateTime(2010, 1, 1, 10, 0, 0, DateTimeKind.Utc); + + var base64 = Serialize(new Document("time", dateTime)); + + Assert.AreEqual("EwAAAAl0aW1lAADJU+klAQAAAA==", 
base64); + } + + [Test] + [ExpectedException(typeof(ArgumentException), UserMessage = "Shouldn't be able to write large document")] + public void TestWritingTooLargeDocument() + { + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + var b = new Binary(new byte[BsonInfo.MaxDocumentSize]); + var big = new Document().Add("x", b); + + writer.WriteObject(big); + } + + [Test] + public void TestWriteBytesAsBinary() + { + var bson = Serialize(new Document("bytes", new byte[] {10, 12})); + + Assert.AreEqual("FwAAAAVieXRlcwAGAAAAAgIAAAAKDAA=",bson); + } + + [Test] + public void TestWriteTimeSpanAsLong() + { + var span = TimeSpan.FromSeconds(123456); + + var bson = Serialize(new Document("span", span)); + + Assert.AreEqual("EwAAABJzcGFuAACggnEfAQAAAA==",bson); + } + + [Test] + public void TestWriteUriAsString() + { + var uri = new Uri("http://www.microsoft.com"); + + var bson = Serialize(new Document("uri", uri)); + + Assert.AreEqual("KAAAAAJ1cmkAGgAAAGh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS8AAA==",bson); + } + + [Test] + public void TestWriteMongoRegex() + { + var regex = new MongoRegex("expression", + MongoRegexOption.IgnoreCase | MongoRegexOption.IgnorePatternWhitespace | MongoRegexOption.Multiline); + + var bson = Serialize(new Document("regex", regex)); + + Assert.AreEqual("GwAAAAtyZWdleABleHByZXNzaW9uAGltZwAA",bson); + } + + [Test] + public void TestWriteNetRegex() + { + var regex = new Regex("expression", RegexOptions.IgnoreCase | RegexOptions.IgnorePatternWhitespace | RegexOptions.Multiline); + + var bson = Serialize(new Document("regex", regex)); + + Assert.AreEqual("GwAAAAtyZWdleABleHByZXNzaW9uAGltZwAA", bson); + } + + [Test] + public void TestCanWriteNagativeDates() + { + var bson = Serialize(new Document("date", new DateTime(1960, 1, 1))); + + Assert.AreEqual("EwAAAAlkYXRlAIBFaoO2////AA==",bson); + } + } +} \ No newline at end of file diff --git a/MongoDB.Net-Tests/Bson/TestRoundTrips.cs b/source/MongoDB.Tests/UnitTests/Bson/TestRoundTrips.cs similarity index 90% rename from MongoDB.Net-Tests/Bson/TestRoundTrips.cs rename to source/MongoDB.Tests/UnitTests/Bson/TestRoundTrips.cs index 3461271b..9226e437 100644 --- a/MongoDB.Net-Tests/Bson/TestRoundTrips.cs +++ b/source/MongoDB.Tests/UnitTests/Bson/TestRoundTrips.cs @@ -1,13 +1,11 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Text; - -using MongoDB.Driver.Bson; - -using NUnit.Framework; - -namespace MongoDB.Driver.Bson +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using MongoDB.Bson; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Bson { [TestFixture] public class TestRoundTrips @@ -31,12 +29,12 @@ public void TestMultibyteUnicode(){ [Test] public void TestDBRef(){ - Document source = new Document(); - source.Append("x",1).Append("ref",new DBRef("refs","ref1")); + Document source = new Document(); + source.Add("x", 1).Add("ref", new DBRef("refs", "ref1")); - Document copy = WriteAndRead(source); - - Assert.IsTrue(copy.Contains("ref")); + Document copy = WriteAndRead(source); + + Assert.IsTrue(copy.ContainsKey("ref")); Assert.IsTrue(copy["ref"].GetType() == typeof(DBRef)); DBRef sref = (DBRef)source["ref"]; @@ -50,8 +48,8 @@ public void TestDBRef(){ public void TestDateLocal(){ DateTime now = DateTime.Now; - Document source = new Document(); - source.Append("d",now); + Document source = new Document(); + source.Add("d", now); Document copy = WriteAndRead(source); @@ -66,8 +64,8 @@ public void TestDateLocal(){ public 
void TestDateUTC(){ DateTime now = DateTime.UtcNow; - Document source = new Document(); - source.Append("d",now); + Document source = new Document(); + source.Add("d", now); Document copy = WriteAndRead(source); DateTime then = (DateTime)copy["d"]; @@ -81,7 +79,7 @@ public void TestGUID(){ Guid expected = Guid.NewGuid(); Document source = new Document(); - source.Append("uuid", expected); + source.Add("uuid", expected); Guid read = (Guid)(WriteAndRead(source)["uuid"]); @@ -147,14 +145,14 @@ public void TestEmptyArray(){ } protected Document WriteAndRead(Document source){ - MemoryStream ms = new MemoryStream(); - BsonWriter writer = new BsonWriter(ms); + MemoryStream ms = new MemoryStream(); + BsonWriter writer = new BsonWriter(ms, new BsonDocumentDescriptor()); - writer.Write(source); + writer.WriteObject(source); writer.Flush(); ms.Seek(0, SeekOrigin.Begin); - BsonReader reader = new BsonReader(ms); + BsonReader reader = new BsonReader(ms,new BsonDocumentBuilder()); return reader.Read(); } } diff --git a/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationBuilderTests.cs b/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationBuilderTests.cs new file mode 100644 index 00000000..f0391bea --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationBuilderTests.cs @@ -0,0 +1,46 @@ +using System; +using MongoDB.Configuration; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Configuration +{ + [TestFixture] + public class MongoConfigurationBuilderTests + { + private class Person + { + public Guid Id { get; set; } + + public int Age { get; set; } + + public string Name { get; set; } + } + + [Test] + public void Test() + { + var configure = new MongoConfigurationBuilder(); + + configure.ConnectionString(cs => + { + cs.Pooled = true; + }); + + configure.Mapping(mapping => + { + mapping.DefaultProfile(p => + { + p.AliasesAreCamelCased(); + p.CollectionNamesAreCamelCasedAndPlural(); + }); + + mapping.Map(m => + { + m.CollectionName("people"); + m.Member(x => x.Age).Alias("age"); + m.Member(x => x.Name).Alias("name").DefaultValue("something").Ignore(); + }); + }); + } + } +} diff --git a/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationSectionTests.1.config b/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationSectionTests.1.config new file mode 100644 index 00000000..e61fc3bb --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationSectionTests.1.config @@ -0,0 +1,11 @@ + + + +
+ + + + + + + \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationSectionTests.2.config b/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationSectionTests.2.config new file mode 100644 index 00000000..6de8c6b9 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationSectionTests.2.config @@ -0,0 +1,11 @@ + + + +
+ + + + + + + \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationSectionTests.cs b/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationSectionTests.cs new file mode 100644 index 00000000..fe0fe2ca --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationSectionTests.cs @@ -0,0 +1,88 @@ +using System.Configuration; +using System.IO; +using System.Reflection; +using System.Text; +using MongoDB.Configuration; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Configuration +{ + [TestFixture] + public class MongoConfigurationSectionTests + { + private MongoConfigurationSection ReadFromFile(int index) + { + var name = string.Format("{0}.{1}.config", GetType().FullName, index); + var assembly = Assembly.GetExecutingAssembly(); + var tmpFile = new FileInfo(Path.GetTempFileName()); + try + { + using(var stream = assembly.GetManifestResourceStream(name)) + using(var reader = new StreamReader(stream, Encoding.Default, false)) + File.WriteAllText(tmpFile.FullName, reader.ReadToEnd(),Encoding.Default); + + var map = new ExeConfigurationFileMap { ExeConfigFilename = tmpFile.FullName }; + var exeConfiguration = ConfigurationManager.OpenMappedExeConfiguration(map, ConfigurationUserLevel.None); + return exeConfiguration.GetSection(MongoConfigurationSection.DefaultSectionName) as MongoConfigurationSection; + } + finally + { + if(tmpFile.Exists) + tmpFile.Delete(); + } + } + + [Test] + public void CanReadFromTestsConfig() + { + var section = MongoConfigurationSection.GetSection(); + Assert.IsNotNull(section); + Assert.AreEqual("Server=localhost:27017", section.Connections["default"].ConnectionString); + Assert.AreEqual("Server=localhost:27018", section.Connections["local21018"].ConnectionString); + } + + [Test] + public void CanReadWithNonDefaultSectionName() + { + var section = MongoConfigurationSection.GetSection("mongoNonDefaultName"); + Assert.IsNotNull(section); + Assert.AreEqual("Server=localhost:27018", section.Connections["local21018"].ConnectionString); + } + + [Test] + public void CanCreateConfigurationFromSection() + { + var section = MongoConfigurationSection.GetSection(); + var config = section.CreateConfiguration(); + Assert.IsNotNull(config); + Assert.AreEqual("Server=localhost:27017", config.ConnectionString); + } + + [Test] + public void CanUpdateConfigurationFromSection() + { + var section = ReadFromFile(1); + var config = new MongoConfiguration(); + Assert.IsEmpty(config.ConnectionString); + section.UpdateConfiguration(config); + Assert.AreEqual("Server=localhost:27017", config.ConnectionString); + } + + [Test] + public void CanReadLocalTimeFromConfig() + { + var section = ReadFromFile(2); + Assert.IsNotNull(section); + Assert.AreEqual(false,section.ReadLocalTime); + } + + [Test] + public void CanUpdateConfigurationFromReadLocalTime() + { + var section = ReadFromFile(2); + Assert.IsNotNull(section); + var config = section.CreateConfiguration(); + Assert.AreEqual(false, config.ReadLocalTime); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationTests.cs b/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationTests.cs new file mode 100644 index 00000000..eef4372a --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Configuration/MongoConfigurationTests.cs @@ -0,0 +1,61 @@ +using System; +using MongoDB.Configuration; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Configuration +{ + [TestFixture] + public class 
MongoConfigurationTests + { + [Test] + public void IsModifiableByDefault() + { + var config = new MongoConfiguration(); + Assert.IsTrue(config.IsModifiable); + } + + [Test] + public void IsNotModifiableAfterValidate() + { + var config = new MongoConfiguration(); + config.ValidateAndSeal(); + Assert.IsFalse(config.IsModifiable); + } + + [Test] + [ExpectedException(typeof(InvalidOperationException))] + public void CanNotChangeConnectionStringAfterValidate() + { + var config = new MongoConfiguration(); + config.ValidateAndSeal(); + config.ConnectionString = ""; + } + + [Test] + [ExpectedException(typeof(InvalidOperationException))] + public void CanNotChangeMappingStoreAfterValidate() + { + var config = new MongoConfiguration(); + config.ValidateAndSeal(); + config.MappingStore = null; + } + + [Test] + [ExpectedException(typeof(InvalidOperationException))] + public void CanNotChangeReadLocalTimeAfterValidate() + { + var config = new MongoConfiguration(); + config.ValidateAndSeal(); + config.ReadLocalTime = true; + } + + [Test] + [ExpectedException(typeof(InvalidOperationException))] + public void CanNotChangeSerializationFactoryAfterValidate() + { + var config = new MongoConfiguration(); + config.ValidateAndSeal(); + config.SerializationFactory = null; + } + } +} \ No newline at end of file diff --git a/MongoDB.Net-Tests/IO/TestMsgMessage.cs b/source/MongoDB.Tests/UnitTests/IO/TestMsgMessage.cs similarity index 93% rename from MongoDB.Net-Tests/IO/TestMsgMessage.cs rename to source/MongoDB.Tests/UnitTests/IO/TestMsgMessage.cs index ea2c755d..ce6c6740 100644 --- a/MongoDB.Net-Tests/IO/TestMsgMessage.cs +++ b/source/MongoDB.Tests/UnitTests/IO/TestMsgMessage.cs @@ -1,10 +1,9 @@ - -using System; +using System; using System.IO; -using MongoDB.Driver.Protocol; -using NUnit.Framework; - -namespace MongoDB.Driver.IO +using MongoDB.Protocol; +using NUnit.Framework; + +namespace MongoDB.UnitTests.IO { [TestFixture()] public class TestMsgMessage diff --git a/source/MongoDB.Tests/UnitTests/IO/TestQueryMessage.cs b/source/MongoDB.Tests/UnitTests/IO/TestQueryMessage.cs new file mode 100644 index 00000000..89407b50 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/IO/TestQueryMessage.cs @@ -0,0 +1,54 @@ +using System; +using System.IO; +using MongoDB.Bson; +using MongoDB.Protocol; +using NUnit.Framework; + +namespace MongoDB.UnitTests.IO +{ + [TestFixture] + public class TestQueryMessage + { + [Test] + public void TestAllBytesWritten() + { + var query = new Document {{"col1", 1}}; + + var msg = new QueryMessage(new BsonWriterSettings(), query, "TestDB.TestCol"); + var buffer = new MemoryStream(); + msg.Write(buffer); + + var output = buffer.ToArray(); + var hexdump = BitConverter.ToString(output); + //Console.WriteLine("Dump: " + hexdump); + + Assert.IsTrue(output.Length > 0); + Assert.AreEqual("3A-00-00-00-00-00-00-00-00-00-00-00-D4-07-00-00-00-00-00-00-54-65-73-74-44-42-2E-54-65-73-74-43-6F-6C-00-00-00-00-00-00-00-00-00-0F-00-00-00-10-63-6F-6C-31-00-01-00-00-00-00", + hexdump); + } + + [Test] + public void TestWriteMessageTwice() + { + const string expectedHex = "3A-00-00-00-00-00-00-00-00-00-00-00-D4-07-00-00-00-00-00-00-54-65-73-74-44-42-2E-54-65-73-74-43-6F-6C-00-00-00-00-00-00-00-00-00-0F-00-00-00-10-63-6F-6C-31-00-01-00-00-00-00"; + var query = new Document(); + query.Add("col1", 1); + + var msg = new QueryMessage(new BsonWriterSettings(), query, "TestDB.TestCol"); + var buffer = new MemoryStream(); + msg.Write(buffer); + + var output = buffer.ToArray(); + var hexdump = 
BitConverter.ToString(output); + + var buffer2 = new MemoryStream(); + msg.Write(buffer2); + + var output2 = buffer.ToArray(); + var hexdump2 = BitConverter.ToString(output2); + + Assert.AreEqual(expectedHex, hexdump); + Assert.AreEqual(hexdump, hexdump2); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Builders/DocumentPropertyTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Builders/DocumentPropertyTests.cs new file mode 100644 index 00000000..f1f69df6 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Builders/DocumentPropertyTests.cs @@ -0,0 +1,24 @@ +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Builders +{ + [TestFixture] + public class DocumentPropertyTests : SerializationTestBase + { + public class DocumentProperty + { + public Document A { get; set; } + } + + [Test] + public void CanDeserializeADocumentProperty() + { + //{ A: { B: "b" } } + const string bson = "FgAAAANBAA4AAAACQgACAAAAYgAAAA=="; + var documentProperty = Deserialize(bson); + Assert.IsNotNull(documentProperty); + Assert.IsNotNull(documentProperty.A); + Assert.AreEqual("b", documentProperty.A["B"]); + } + } +} diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Builders/EmbeddedClassPropertyTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Builders/EmbeddedClassPropertyTests.cs new file mode 100644 index 00000000..5a140189 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Builders/EmbeddedClassPropertyTests.cs @@ -0,0 +1,33 @@ +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Builders +{ + [TestFixture] + public class EmbeddedClassPropertyTests : SerializationTestBase + { + public class SimpleObject + { + public string A { get; set; } + public string B { get; set; } + public SimpleObjectC C { get; set; } + } + + public class SimpleObjectC + { + public string D { get; set; } + } + + [Test] + public void CanDeserializeASimpleObject() + { + //{ A: "a", B: "b", C: { D: "d" } } + const string bson = "KAAAAAJBAAIAAABhAAJCAAIAAABiAANDAA4AAAACRAACAAAAZAAAAA=="; + var simpleObject = Deserialize(bson); + Assert.IsNotNull(simpleObject); + Assert.AreEqual("a", simpleObject.A); + Assert.AreEqual("b", simpleObject.B); + Assert.IsNotNull(simpleObject.C); + Assert.AreEqual("d", simpleObject.C.D); + } + } +} diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Builders/EnumerablePropertyTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Builders/EnumerablePropertyTests.cs new file mode 100644 index 00000000..2d7aaf9d --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Builders/EnumerablePropertyTests.cs @@ -0,0 +1,68 @@ +using System.Collections; +using System.Collections.Generic; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Builders +{ + [TestFixture] + public class EnumerablePropertyTests : SerializationTestBase + { + public class Enumerable + { + public IEnumerable A { get; set; } + } + + [Test] + public void CanDeserializeAnSimpleArrayAsIEnumerable() + { + //{ A: [1, 2] } + const string bson = "GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA"; + var simpleArray = Deserialize(bson); + Assert.IsNotNull(simpleArray); + Assert.IsNotNull(simpleArray.A); + var list = new List(); + foreach (var value in simpleArray.A) + list.Add(value); + Assert.AreEqual(2, list.Count); + Assert.Contains(1, list); + Assert.Contains(2, list); + } + + public class EnumerableOfIntegers + { + public IEnumerable A { get; set; } + } + + [Test] + public void 
CanDeserializeAnSimpleArrayAsIEnumerableOfInt() + { + //{ A: [1, 2] } + const string bson = "GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA"; + var simpleArray = Deserialize(bson); + Assert.IsNotNull(simpleArray); + Assert.IsNotNull(simpleArray.A); + var list = new List(simpleArray.A); + Assert.AreEqual(2, list.Count); + Assert.Contains(1, list); + Assert.Contains(2, list); + } + + [Test] + public void CanTransfromASimpleArrayToItsMostEqualTypeInADocument(){ + var bson = Serialize(new Document().Add("A", new[] {"text"})); + + var simpleArray = Deserialize(bson); + + Assert.AreEqual(1, simpleArray.Count); + + var array = simpleArray["A"]; + + Assert.IsNotNull(array); + Assert.IsInstanceOfType(typeof(IList),array); + + var stringArray = (IList)array; + Assert.AreEqual(1,stringArray.Count); + Assert.Contains("text",(ICollection)stringArray); + } + } +} diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Builders/ExtendedPropertiesTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Builders/ExtendedPropertiesTests.cs new file mode 100644 index 00000000..8ef4e63a --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Builders/ExtendedPropertiesTests.cs @@ -0,0 +1,57 @@ +using System.Collections.Generic; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Builders +{ + [TestFixture] + public class ExtendedPropertiesTests : SerializationTestBase + { + public class IDictionaryProperty + { + public IDictionary ExtendedProperties { get; private set; } + } + + [Test] + public void CanDeserializePropertiesWithoutMapsUsingAnIDictionary() + { + //{ A: { B: "b" } } + const string bson = "FgAAAANBAA4AAAACQgACAAAAYgAAAA=="; + var prop = Deserialize(bson); + Assert.IsNotNull(prop); + Assert.IsNotNull(prop.ExtendedProperties["A"]); + Assert.AreEqual("b", ((Document)prop.ExtendedProperties["A"])["B"]); + } + + public class DictionaryProperty + { + public Dictionary ExtendedProperties { get; private set; } + } + + [Test] + public void CanDeserializePropertiesWithoutMapsUsingADictionary() + { + //{ A: { B: "b" } } + const string bson = "FgAAAANBAA4AAAACQgACAAAAYgAAAA=="; + var prop = Deserialize(bson); + Assert.IsNotNull(prop); + Assert.IsNotNull(prop.ExtendedProperties["A"]); + Assert.AreEqual("b", ((Document)prop.ExtendedProperties["A"])["B"]); + } + + public class DocumentProperty + { + public Document ExtendedProperties { get; private set; } + } + + [Test] + public void CanDeserializePropertiesWithoutMapsUsingADocument() + { + //{ A: { B: "b" } } + const string bson = "FgAAAANBAA4AAAACQgACAAAAYgAAAA=="; + var prop = Deserialize(bson); + Assert.IsNotNull(prop); + Assert.IsNotNull(prop.ExtendedProperties["A"]); + Assert.AreEqual("b", ((Document)prop.ExtendedProperties["A"])["B"]); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Builders/GenericListPropertyTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Builders/GenericListPropertyTests.cs new file mode 100644 index 00000000..409abb92 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Builders/GenericListPropertyTests.cs @@ -0,0 +1,86 @@ +using System.Collections.Generic; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Builders +{ + [TestFixture] + public class GenericListPropertyTests : SerializationTestBase + { + public class GenericListOfObjects + { + public List A { get; set; } + } + + [Test] + public void CanDeserializeGenericListOfObjects() + { + //{ A: [1, 2] } + const string bson = "GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA"; + var 
simpleArray = Deserialize(bson); + Assert.IsNotNull(simpleArray); + Assert.IsNotNull(simpleArray.A); + Assert.AreEqual(2, simpleArray.A.Count); + Assert.Contains(1, simpleArray.A); + Assert.Contains(2, simpleArray.A); + } + + public class GenericListOfIntegers + { + public List A { get; set; } + } + + [Test] + public void CanDeserializeGenericListOfIntegers() + { + //{ A: [1, 2] } + const string bson = "GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA"; + var simpleArray = Deserialize(bson); + Assert.IsNotNull(simpleArray); + Assert.IsNotNull(simpleArray.A); + Assert.AreEqual(2, simpleArray.A.Count); + Assert.Contains(1, simpleArray.A); + Assert.Contains(2, simpleArray.A); + } + + public class GenericListOfClasses + { + public IList A { get; set; } + } + + public class GenericListOfClassesA + { + public string B { get; set; } + } + + [Test] + public void CanDeserializeAGenericListOfClasses() + { + //{ A: [{ B: "b" }] } + const string bson = "HgAAAARBABYAAAADMAAOAAAAAkIAAgAAAGIAAAAA"; + var objectArray = Deserialize(bson); + Assert.IsNotNull(objectArray); + Assert.IsNotNull(objectArray.A); + Assert.AreEqual(1, objectArray.A.Count); + Assert.IsNotNull(objectArray.A[0].B); + Assert.AreEqual("b", objectArray.A[0].B); + } + + public class GenericListOfEmbeddedDocuments + { + public IList A { get; set; } + } + + [Test] + public void CanDeserializeAListOfEmbeddedDocuments() + { + //{ A: [{ B: "b" }] } + const string bson = "HgAAAARBABYAAAADMAAOAAAAAkIAAgAAAGIAAAAA"; + var objectArray = Deserialize(bson); + Assert.IsNotNull(objectArray); + Assert.IsNotNull(objectArray.A); + Assert.AreEqual(1, objectArray.A.Count); + Assert.IsNotNull(objectArray.A[0]["B"]); + Assert.AreEqual("b", objectArray.A[0]["B"]); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Builders/PolymorphicObjectTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Builders/PolymorphicObjectTests.cs new file mode 100644 index 00000000..2b4c1cd7 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Builders/PolymorphicObjectTests.cs @@ -0,0 +1,85 @@ +using MongoDB.Configuration; +using MongoDB.Configuration.Mapping; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Builders +{ + [TestFixture] + public class PolymorphicObjectTests : SerializationTestBase + { + protected override IMappingStore MappingStore + { + get + { + var configure = new MongoConfigurationBuilder(); + configure.Mapping(mapping => + { + mapping.DefaultProfile(p => + { + p.SubClassesAre(t => t.IsSubclassOf(typeof(BaseClass))); + }); + + mapping.Map(); + mapping.Map(); + mapping.Map(); + }); + + return configure.BuildMappingStore(); + } + } + + public abstract class BaseClass + { + public string A { get; set; } + } + + public class ClassA : BaseClass + { + public string B { get; set; } + } + + public class ClassB : BaseClass + { + public string C { get; set; } + } + + public class ClassD : ClassA + { + public string E { get; set; } + } + + [Test] + public void CanDeserializeMiddleClassDirectly() + { + var doc = new Document("_t", "ClassB").Add("A", "a").Add("C", "c"); + var bson = Serialize(doc); + var classB = Deserialize(bson); + Assert.IsInstanceOfType(typeof(ClassB), classB); + Assert.AreEqual("a", classB.A); + Assert.AreEqual("c", classB.C); + } + + [Test] + public void CanDeserializeMiddleClassIndirectly() + { + var doc = new Document("_t", "ClassB").Add("A", "a").Add("C", "c"); + var bson = Serialize(doc); + var classB = Deserialize(bson); + Assert.IsInstanceOfType(typeof(ClassB), classB); + 
Assert.AreEqual("a", classB.A); + Assert.AreEqual("c", ((ClassB)classB).C); + } + + [Test] + public void CanDeserializeLeafClassIndirectly() + { + var doc = new Document("_t", new [] { "ClassA", "ClassD" }).Add("A", "a").Add("B", "b").Add("E", "e"); + var bson = Serialize(doc); + var classD = Deserialize(bson); + Assert.IsInstanceOfType(typeof(ClassD), classD); + Assert.AreEqual("a", classD.A); + Assert.AreEqual("b", ((ClassA)classD).B); + Assert.AreEqual("e", ((ClassD)classD).E); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Builders/ValueConversionTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Builders/ValueConversionTests.cs new file mode 100644 index 00000000..7f115824 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Builders/ValueConversionTests.cs @@ -0,0 +1,21 @@ +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Builders +{ + [TestFixture] + public class ValueConversionTests : SerializationTestBase + { + public class SimpleObject + { + public bool A { get; set; } + } + + [Test] + public void CanConvertSimpleValues(){ + var bson = Serialize(new Document("A", 1.0)); + var result = Deserialize(bson); + + Assert.IsTrue(result.A); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/DocumentPropertyTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/DocumentPropertyTests.cs new file mode 100644 index 00000000..b71c5c1c --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/DocumentPropertyTests.cs @@ -0,0 +1,27 @@ +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Descriptors +{ + [TestFixture] + public class DocumentPropertyTests : SerializationTestBase + { + public class DocumentProperty + { + public Document A { get; set; } + } + + [Test] + public void CanSerialize() + { + var bson = Serialize(new DocumentProperty() { A = new Document("B", "b") }); + Assert.AreEqual("FgAAAANBAA4AAAACQgACAAAAYgAAAA==", bson); + } + + [Test] + public void CanSerializeUsingAnonymousType() + { + var bson = Serialize(new { A = new { B = "b" } }); + Assert.AreEqual("FgAAAANBAA4AAAACQgACAAAAYgAAAA==", bson); + } + } +} diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/DotPropertyTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/DotPropertyTests.cs new file mode 100644 index 00000000..8fdf8115 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/DotPropertyTests.cs @@ -0,0 +1,47 @@ +using MongoDB.Attributes; +using NUnit.Framework; +using System.Collections.Generic; + +namespace MongoDB.UnitTests.Serialization.Descriptors +{ + [TestFixture] + public class DotPropertyTests : SerializationTestBase + { + public class DotClass + { + [MongoAlias("a")] + public List A { get; set; } + + [MongoAlias("c")] + public DotChildC C { get; set; } + } + + public class DotChildA + { + [MongoAlias("b")] + public int B { get; set; } + } + + public class DotChildC + { + [MongoAlias("d")] + public int D { get; set; } + } + + [Test] + public void CanSerializeWithChild() + { + var expected = Serialize(new Document("c.d", 10)); + var bson = Serialize(new Document("C.D", 10)); + Assert.AreEqual(expected, bson); + } + + [Test] + public void CanSerializeWithChildIndexer() + { + var expected = Serialize(new Document("a.5.b", 10)); + var bson = Serialize(new Document("A.5.B", 10)); + Assert.AreEqual(expected, bson); + } + } +} \ No newline at end of file diff --git 
a/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/EmbeddedClassPropertyTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/EmbeddedClassPropertyTests.cs new file mode 100644 index 00000000..b068650f --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/EmbeddedClassPropertyTests.cs @@ -0,0 +1,61 @@ +using MongoDB.Attributes; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Descriptors +{ + [TestFixture] + public class EmbeddedClassPropertyTests : SerializationTestBase + { + public class SimpleObject + { + public string A { get; set; } + public string B { get; set; } + public SimpleObjectC C { get; set; } + } + public class SimpleObjectC + { + public string D { get; set; } + } + + [Test] + public void CanSerializeASimpleObject() + { + var bson = Serialize(new SimpleObject() { A = "a", B = "b", C = new SimpleObjectC() { D = "d" } }); + Assert.AreEqual("KAAAAAJBAAIAAABhAAJCAAIAAABiAANDAA4AAAACRAACAAAAZAAAAA==", bson); + } + + [Test] + public void CanSerializeASimpleObjectUsingAnAnonymousType() + { + var bson = Serialize(new { A = "a", B = "b", C = new { D = "d" } }); + Assert.AreEqual("KAAAAAJBAAIAAABhAAJCAAIAAABiAANDAA4AAAACRAACAAAAZAAAAA==", bson); + } + + [Test] + public void CanSerializeASimpleObjectWithANullProperty() + { + var bson = Serialize(new SimpleObject()); + Assert.AreEqual("DgAAAApBAApCAApDAAA=", bson); + } + + public class SuperClass + { + [MongoAlias("a")] + public SuperClassA A { get; set; } + } + + public class SuperClassA + { + [MongoAlias("b")] + public string B { get; set; } + } + + [Test] + public void CanSerializeAnEmbeddedClassPropertyUsingDotSyntaxWhenAliasesExist() + { + var expected = Serialize(new Document("a.b", "b")); + var bson = Serialize(new Document("A.B", "b")); + Assert.AreEqual(expected, bson); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/EnumerablePropertyTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/EnumerablePropertyTests.cs new file mode 100644 index 00000000..1af9eeea --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/EnumerablePropertyTests.cs @@ -0,0 +1,52 @@ +using System.Collections; +using System.Collections.Generic; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Descriptors +{ + [TestFixture] + public class EnumerablePropertyTests : SerializationTestBase + { + public class Enumerable + { + public IEnumerable A { get; set; } + } + + [Test] + public void CanDeserializeAnSimpleArrayAsIEnumerable() + { + var e = new Enumerable(); + e.A = new ArrayList { 1, 2 }; + string bson = Serialize(e); + Assert.AreEqual("GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA", bson); + } + + [Test] + public void CanDeserializeAnSimpleArrayAsIEnumerableUsingAnonymousType() + { + string bson = Serialize(new { A = new[] { 1, 2 } }); + Assert.AreEqual("GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA", bson); + } + + public class EnumerableOfIntegers + { + public IEnumerable A { get; set; } + } + + [Test] + public void CanDeserializeAnSimpleArrayAsIEnumerableOfInt() + { + var e = new EnumerableOfIntegers(); + e.A = new List { 1, 2 }; + string bson = Serialize(e); + Assert.AreEqual("GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA", bson); + } + + [Test] + public void CanDeserializeAnSimpleArrayAsIEnumerableOfIntUsingAnonymousType() + { + string bson = Serialize(new { A = new[] { 1, 2 } }); + Assert.AreEqual("GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA", bson); + } + } +} \ No newline at end of file diff --git 
a/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/ExtendedPropertiesTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/ExtendedPropertiesTests.cs new file mode 100644 index 00000000..184966f8 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/ExtendedPropertiesTests.cs @@ -0,0 +1,45 @@ +using System.Collections.Generic; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Descriptors +{ + [TestFixture] + public class ExtendedPropertiesTests : SerializationTestBase + { + public class IDictionaryProperty + { + public IDictionary ExtendedProperties { get; set; } + } + + [Test] + public void CanSerializeUsingIDictionary() + { + var bson = Serialize(new IDictionaryProperty() { ExtendedProperties = new Dictionary { { "A", new Document("B", "b") } } }); + Assert.AreEqual("FgAAAANBAA4AAAACQgACAAAAYgAAAA==", bson); + } + + public class DictionaryProperty + { + public Dictionary ExtendedProperties { get; set; } + } + + [Test] + public void CanSerializeUsingDictionary() + { + var bson = Serialize(new DictionaryProperty() { ExtendedProperties = new Dictionary { { "A", new Document("B", "b") } } }); + Assert.AreEqual("FgAAAANBAA4AAAACQgACAAAAYgAAAA==", bson); + } + + public class DocumentProperty + { + public Document ExtendedProperties { get; set; } + } + + [Test] + public void CanSerializeUsingDocument() + { + var bson = Serialize(new DocumentProperty() { ExtendedProperties = new Document("A", new Document("B", "b")) }); + Assert.AreEqual("FgAAAANBAA4AAAACQgACAAAAYgAAAA==", bson); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/GenericListPropertyTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/GenericListPropertyTests.cs new file mode 100644 index 00000000..263a55ba --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/GenericListPropertyTests.cs @@ -0,0 +1,102 @@ +using System.Collections.Generic; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Descriptors +{ + [TestFixture] + public class GenericListPropertyTests : SerializationTestBase + { + public class GenericListOfObjectsProperty + { + public List A { get; set; } + } + + [Test] + public void CanSerializeAGenericListOfObjects() + { + var bson = Serialize(new GenericListOfObjectsProperty() { A = new List { 1, 2 } }); + Assert.AreEqual("GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA", bson); + } + + [Test] + public void CanSerializeAGenericListOfObjectsUsingAnonymousType() + { + var bson = Serialize(new { A = new[] { 1, 2 } }); + Assert.AreEqual("GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA", bson); + } + + public class GenericListOfIntegerProperty + { + public List A { get; set; } + } + + [Test] + public void CanSerializeAGenericListOfIntegers() + { + var bson = Serialize(new GenericListOfIntegerProperty() { A = new List { 1, 2 } }); + Assert.AreEqual("GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA", bson); + } + + [Test] + public void CanSerializeAGenericListOfIntegersUsingAnonymousType() + { + var bson = Serialize(new { A = new[] { 1, 2 } }); + Assert.AreEqual("GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA", bson); + } + + public class GenericListOfClasses + { + public IList A { get; set; } + } + + public class GenericListOfClassesA + { + public string B { get; set; } + } + + [Test] + public void CanSerializeAGenericListOfClasses() + { + var doc = new Document("A", new[] { new Document("B", "b") }); + var o = new GenericListOfClasses(); + o.A = new List { new GenericListOfClassesA() { B = 
"b" } }; + string bson = Serialize(o); + string expected = Serialize(doc); + Assert.AreEqual(expected, bson); + } + + [Test] + public void CanSerializeAGenericListOfClassesUsingAnonymousType() + { + var doc = new Document("A", new[] { new Document("B", "b") }); + string bson = Serialize(new { A = new[] { new { B = "b" } } }); + string expected = Serialize(doc); + Assert.AreEqual(expected, bson); + } + + public class GenericListOfEmbeddedDocuments + { + public IList A { get; set; } + } + + [Test] + public void CanSerializeAListOfEmbeddedDocuments() + { + var doc = new Document("A", new[] { new Document("B", "b" ) }); + var o = new GenericListOfEmbeddedDocuments(); + o.A = new List { new Document().Append("B", "b") }; + string bson = Serialize(o); + string expected = Serialize(doc); + Assert.AreEqual(expected, bson); + } + + [Test] + public void CanSerializeAListOfEmbeddedDocumentsUsingAnonymousType() + { + var doc = new Document("A", new[] { new Document("B", "b") }); + string bson = Serialize(new { A = new[] { new Document("B", "b") } }); + string expected = Serialize(doc); + Assert.AreEqual(expected, bson); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/ModifierTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/ModifierTests.cs new file mode 100644 index 00000000..61e58534 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/ModifierTests.cs @@ -0,0 +1,32 @@ +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Descriptors +{ + [TestFixture] + [Ignore("Currently we do not plan to support this. As an alternative you can use Mo or Linq.")] + public class ModifierTests : SerializationTestBase + { + public class ModifierEntity + { + public int A { get; set; } + } + + [Test] + public void CanSerializeIncrementUsingAnonymousType() + { + var expectedBson = Serialize(new Document("$inc", new Document("A", 1))); + var bson = Serialize(new { A = new Document("$inc", 1) }); + + Assert.AreEqual(expectedBson, bson); + } + + [Test] + public void CanSerializeSetUsingAnonymousType() + { + var expectedBson = Serialize(new Document("$set", new Document("A", 4))); + var bson = Serialize(new { A = new Document("$inc", 1) }); + + Assert.AreEqual(expectedBson, bson); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/OperatorTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/OperatorTests.cs new file mode 100644 index 00000000..6d1f4e2d --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/OperatorTests.cs @@ -0,0 +1,36 @@ +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization.Descriptors +{ + [TestFixture] + public class OperatorTests : SerializationTestBase + { + public class OperatorProperty + { + public int A { get; set; } + } + + [Test] + public void CanSerializeWithStandardOperatorUsingAnonymousType() + { + var bson = Serialize(new { A = Op.GreaterThan(12) }); + Assert.AreEqual("FgAAAANBAA4AAAAQJGd0AAwAAAAAAA==", bson); + } + + [Test] + public void CanSerializeWithMetaOperatorUsingAnonymousType() + { + var bson = Serialize(new { A = !Op.GreaterThan(12) }); + + Assert.AreEqual("IQAAAANBABkAAAADJG5vdAAOAAAAECRndAAMAAAAAAAA", bson); + } + + [Test] + public void CanSerializeWithComplexOperatorsUsingAnonymousType() + { + var bson = Serialize(new { A = Op.GreaterThan(12) & !Op.GreaterThan(24) }); + + Assert.AreEqual("KgAAAANBACIAAAAQJGd0AAwAAAADJG5vdAAOAAAAECRndAAYAAAAAAAA", bson); + 
} + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/PolymorphicObjectTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/PolymorphicObjectTests.cs new file mode 100644 index 00000000..248e5e7a --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/PolymorphicObjectTests.cs @@ -0,0 +1,62 @@ +using NUnit.Framework; +using MongoDB.Configuration.Mapping.Auto; +using MongoDB.Configuration.Mapping; + +namespace MongoDB.UnitTests.Serialization.Descriptors +{ + [TestFixture] + public class PolymorphicObjectTests : SerializationTestBase + { + protected override IMappingStore MappingStore + { + get + { + var profile = new AutoMappingProfile(); + profile.IsSubClassDelegate = t => t == typeof(ClassA) || t == typeof(ClassB); + var store = new AutoMappingStore(profile); + //eagerly automap so they are known at deserialization time... + store.GetClassMap(typeof(ClassA)); + store.GetClassMap(typeof(ClassB)); + return store; + } + } + + public abstract class BaseClass + { + public string A { get; set; } + } + + public class ClassA : BaseClass + { + public string B { get; set; } + } + + public class ClassB : BaseClass + { + public string C { get; set; } + } + + [Test] + public void CanDeserializeDirectly() + { + var classB = new ClassB() { A = "a", C = "c" }; + string bson = Serialize(classB); + Assert.AreEqual("JgAAAAJfdAAHAAAAQ2xhc3NCAAJBAAIAAABhAAJDAAIAAABjAAA=", bson); + } + + [Test] + public void CanDeserializeDirectlyWithAnonymousType() + { + string bson = Serialize(new { A = "a", C = "c" }); + Assert.AreEqual("JgAAAAJfdAAHAAAAQ2xhc3NCAAJBAAIAAABhAAJDAAIAAABjAAA=", bson); + } + + [Test] + public void CanDeserializeIndirectly() + { + var baseClass = new ClassB() { A = "a", C = "c" }; + string bson = Serialize(baseClass); + Assert.AreEqual("JgAAAAJfdAAHAAAAQ2xhc3NCAAJBAAIAAABhAAJDAAIAAABjAAA=", bson); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/WhereTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/WhereTests.cs new file mode 100644 index 00000000..43238a1d --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/WhereTests.cs @@ -0,0 +1,71 @@ +using MongoDB.Attributes; +using NUnit.Framework; +using System.Collections.Generic; + +namespace MongoDB.UnitTests.Serialization.Descriptors +{ + [TestFixture] + public class WhereTests : SerializationTestBase + { + public class WhereClass + { + [MongoAlias("a")] + public List A { get; set; } + + [MongoAlias("c")] + public WhereChildC C { get; set; } + } + + public class WhereChildA + { + [MongoAlias("b")] + public int B { get; set; } + } + + public class WhereChildC + { + [MongoAlias("d")] + public int D { get; set; } + } + + [Test] + public void CanSerializeWithChild() + { + var expected = Serialize(Op.Where("this.c.d > 10")); + var bson = Serialize(Op.Where("this.C.D > 10")); + Assert.AreEqual(expected, bson); + } + + [Test] + public void CanSerializeWithChildIndexer() + { + var expected = Serialize(Op.Where("this.a[0].b > 10")); + var bson = Serialize(Op.Where("this.A[0].B > 10")); + Assert.AreEqual(expected, bson); + } + + [Test] + public void CanSerializeWithNonMember() + { + var expected = Serialize(Op.Where("this.a.length > 10")); + var bson = Serialize(Op.Where("this.A.length > 10")); + Assert.AreEqual(expected, bson); + } + + [Test] + public void CanSerializeWithMethod() + { + var expected = Serialize(Op.Where("this.a.awesome().someProperty > 10")); + var bson 
= Serialize(Op.Where("this.A.awesome().someProperty > 10")); + Assert.AreEqual(expected, bson); + } + + [Test] + public void CanSerializeComplex() + { + var expected = Serialize(Op.Where("this.a[4].b > this.c.d && this.c.d == 2")); + var bson = Serialize(Op.Where("this.A[4].B > this.C.D && this.C.D == 2")); + Assert.AreEqual(expected, bson); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/___A1A.tmp b/source/MongoDB.Tests/UnitTests/Serialization/Descriptors/___A1A.tmp new file mode 100644 index 00000000..e69de29b diff --git a/source/MongoDB.Tests/UnitTests/Serialization/SerializationFactoryTests.cs b/source/MongoDB.Tests/UnitTests/Serialization/SerializationFactoryTests.cs new file mode 100644 index 00000000..13f10acf --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/SerializationFactoryTests.cs @@ -0,0 +1,365 @@ +using System; +using System.Collections.Generic; +using MongoDB.Configuration; +using MongoDB.Serialization; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Serialization +{ + [TestFixture] + public class SerializationFactoryTests : SerializationTestBase + { + [Test] + public void GetBsonReaderSettingsDefaults() + { + var factory = new SerializationFactory(); + var readerSettings = factory.GetBsonReaderSettings(typeof(int)); + Assert.AreEqual(readerSettings.ReadLocalTime,true); + Assert.IsNotNull(readerSettings.Builder); + } + + [Test] + public void ReadLocalTimeCanBeChangedByConfig() + { + var factory = new SerializationFactory(new MongoConfiguration {ReadLocalTime = false}); + var readerSettings = factory.GetBsonReaderSettings(typeof(int)); + Assert.AreEqual(readerSettings.ReadLocalTime, false); + } + + public class ProtectedConstructor + { + protected ProtectedConstructor(){} + } + + [Test] + public void CanCreateObjectFromProtectedConstructor() + { + var obj = Deserialize(EmptyDocumentBson); + + Assert.IsNotNull(obj); + } + + public class PrivateConstructor + { + private PrivateConstructor() { } + } + + [Test] + [ExpectedException(typeof(MissingMethodException))] + public void CanNotCreateObjectFromPrivateConstructor() + { + var obj = Deserialize(EmptyDocumentBson); + + Assert.IsNotNull(obj); + } + + public class SetProtectedPropertys + { + protected double Property { get; set; } + + public double GetProperty() {return Property; } + } + + [Test] + public void CanSetProtectedProperty() + { + var bson = Serialize(new Document("Property", 4)); + + var prop = Deserialize(bson); + + Assert.IsNotNull(prop); + Assert.AreEqual(4, prop.GetProperty()); + } + + public class SetPrivatePropertys + { + private double Property { get; set; } + + public double GetProperty() { return Property; } + } + + [Test] + public void CanNotSetPrivatePropertys() + { + var bson = Serialize(new Document("Property", 4)); + + var prop = Deserialize(bson); + + Assert.IsNotNull(prop); + Assert.AreEqual(0, prop.GetProperty()); + } + + public class NullableProperty + { + public double? 
Value { get; set; } + } + + [Test] + public void CanSetNullOnNullablPropertys() + { + var bson = Serialize(new Document("Value", null)); + + var obj = Deserialize<NullableProperty>(bson); + + Assert.IsNotNull(obj); + Assert.IsNull(obj.Value); + } + + [Test] + public void CanSetValueOnNullablPropertys() + { + var bson = Serialize(new Document("Value", 10)); + + var obj = Deserialize<NullableProperty>(bson); + + Assert.IsNotNull(obj); + Assert.AreEqual(10,obj.Value); + } + + public class GenericDictionary + { + public Dictionary<string, int> Property { get; set; } + } + + [Test] + public void CanSerializeGenericDictionary() + { + var expectedBson = Serialize(new Document("Property", new Document() { { "key1", 10 }, { "key2", 20 } })); + var obj = new GenericDictionary { Property = new Dictionary<string, int> { { "key1", 10 }, { "key2", 20 } } }; + var bson = Serialize<GenericDictionary>(obj); + Assert.AreEqual(expectedBson, bson); + } + + [Test] + public void CanDeserializeGenericDictionary() + { + var bson = Serialize(new Document("Property", new Document() { { "key1", 10 }, { "key2", 20 } })); + var prop = Deserialize<GenericDictionary>(bson); + + Assert.IsNotNull(prop); + Assert.IsNotNull(prop.Property); + Assert.AreEqual(2,prop.Property.Count); + Assert.Contains(new KeyValuePair<string, int>("key1", 10), prop.Property); + Assert.Contains(new KeyValuePair<string, int>("key2", 20), prop.Property); + } + + public class GenericStringDictionaryWithComplexType + { + public Dictionary<string, GenericDictionaryComplexType> Dict { get; set; } + } + + + public class GenericDictionaryComplexType + { + public string Name { get; set; } + } + + [Test] + public void CanSerializeStringGenericDictionaryWithComplexType() + { + var expectedBson = Serialize(new Document("Dict", new Document { { "key1", new Document("Name", "a") }, { "key2", new Document("Name", "b") } })); + var obj = new GenericStringDictionaryWithComplexType { Dict = new Dictionary<string, GenericDictionaryComplexType> { { "key1", new GenericDictionaryComplexType { Name = "a" } }, { "key2", new GenericDictionaryComplexType { Name = "b" } } } }; + var bson = Serialize<GenericStringDictionaryWithComplexType>(obj); + Assert.AreEqual(expectedBson, bson); + } + + [Test] + public void CanDeserializeStringGenericDictionaryWithComplexType() + { + var bson = Serialize(new Document("Dict", new Document { { "key1", new Document("Name", "a") }, { "key2", new Document("Name", "b") } })); + var prop = Deserialize<GenericStringDictionaryWithComplexType>(bson); + + Assert.IsNotNull(prop); + Assert.IsNotNull(prop.Dict); + Assert.AreEqual(2, prop.Dict.Count); + Assert.IsTrue(prop.Dict["key1"].Name == "a"); + Assert.IsTrue(prop.Dict["key2"].Name == "b"); + } + + public class GenericIntDictionaryWithComplexType + { + public Dictionary<int, GenericDictionaryComplexType> Dict { get; set; } + } + + [Test] + public void CanSerializeIntGenericDictionaryWithComplexType() + { + var expectedBson = Serialize(new Document("Dict", new Document { { "1", new Document("Name", "a") }, { "2", new Document("Name", "b") } })); + var obj = new GenericIntDictionaryWithComplexType { Dict = new Dictionary<int, GenericDictionaryComplexType> { { 1, new GenericDictionaryComplexType { Name = "a" } }, { 2, new GenericDictionaryComplexType { Name = "b" } } } }; + var bson = Serialize<GenericIntDictionaryWithComplexType>(obj); + Assert.AreEqual(expectedBson, bson); + } + + [Test] + public void CanDeserializeIntGenericDictionaryWithComplexType() + { + var bson = Serialize(new Document("Dict", new Document { { "1", new Document("Name", "a") }, { "2", new Document("Name", "b") } })); + var prop = Deserialize<GenericIntDictionaryWithComplexType>(bson); + + Assert.IsNotNull(prop); + Assert.IsNotNull(prop.Dict); + Assert.AreEqual(2, prop.Dict.Count); + Assert.IsTrue(prop.Dict[1].Name == "a"); + Assert.IsTrue(prop.Dict[2].Name == "b"); + } + + public class SortedListDictionary + { + public SortedList<string, int> Property {
get; set; } + } + + [Test] + public void CanSerializeSortedListDictionary() + { + var expectedBson = Serialize(new Document("Property", new Document { { "key1", 10 }, { "key2", 20 } })); + var obj = new SortedListDictionary { Property = new SortedList<string, int> { { "key1", 10 }, { "key2", 20 } } }; + var bson = Serialize<SortedListDictionary>(obj); + Assert.AreEqual(expectedBson, bson); + } + + [Test] + public void CanDeserializeSortedListDictionary() + { + var bson = Serialize(new Document("Property", new Document { { "key1", 10 }, { "key2", 20 } })); + var prop = Deserialize<SortedListDictionary>(bson); + + Assert.IsNotNull(prop); + Assert.IsNotNull(prop.Property); + Assert.AreEqual(2, prop.Property.Count); + Assert.Contains(new KeyValuePair<string, int>("key1", 10), prop.Property); + Assert.Contains(new KeyValuePair<string, int>("key2", 20), prop.Property); + } + + public class HashSetHelper + { + public HashSet<string> Property { get; set; } + } + + [Test] + public void CanSerializeAndDeserializeHashSet() + { + var obj = new HashSetHelper {Property = new HashSet<string> {"test1", "test2"}}; + var bson = Serialize<HashSetHelper>(obj); + var prop = Deserialize<HashSetHelper>(bson); + + Assert.IsNotNull(prop); + Assert.IsNotNull(prop.Property); + Assert.AreEqual(2, prop.Property.Count); + + Assert.IsTrue(prop.Property.Contains("test1")); + Assert.IsTrue(prop.Property.Contains("test2")); + } + + public class EnumHelper + { + public enum Test + { + A=1, + B=2 + } + + public List<Test> Tests { get; set; } + } + + [Test] + public void CanSerializerAndDesializeEnumLists() + { + var helper = new EnumHelper {Tests = new List<EnumHelper.Test> {EnumHelper.Test.A}}; + var bson = Serialize<EnumHelper>(helper); + var deserialize = Deserialize<EnumHelper>(bson); + + Assert.IsNotNull(deserialize); + Assert.IsNotNull(deserialize.Tests); + Assert.Contains(EnumHelper.Test.A, deserialize.Tests); + } + + public class ByteArrayHelper + { + public byte[] Property { get; set; } + } + + [Test] + public void CanWriteByteArrayPropertyFromBinary() + { + var bson = Serialize(new Document("Property", new Binary(new byte[] {1, 2, 3, 4}))); + + var helper = Deserialize<ByteArrayHelper>(bson); + + Assert.IsNotNull(helper); + Assert.AreEqual(4, helper.Property.Length); + Assert.AreEqual(1, helper.Property[0]); + Assert.AreEqual(2, helper.Property[1]); + Assert.AreEqual(3, helper.Property[2]); + Assert.AreEqual(4, helper.Property[3]); + } + + public class EmbeddedDocumentHelper + { + public Document Document { get; set; } + } + + [Test] + public void CanReadEmbeddedDocument() + { + var bson = Serialize(new Document("Document", new Document("Embedded",new Document("value", 10)))); + + var helper = Deserialize<EmbeddedDocumentHelper>(bson); + + Assert.IsNotNull(helper); + Assert.IsNotNull(helper.Document); + Assert.AreEqual(1, helper.Document.Count); + + var embedded = helper.Document["Embedded"] as Document; + Assert.IsNotNull(embedded); + Assert.AreEqual(1, embedded.Count); + Assert.AreEqual(10, embedded["value"]); + } + + public class DictionaryWithEnumAsKeyHelper + { + public Dictionary<DateTimeKind, int> Dict { get; set; } + } + + [Test] + public void SerializesAnEnumAsIntWhenItsUsedAsDictionaryKey() + { + var obj = new DictionaryWithEnumAsKeyHelper { Dict = new Dictionary<DateTimeKind, int> { { DateTimeKind.Utc, 9 } } }; + var bson = Serialize<DictionaryWithEnumAsKeyHelper>(obj); + var doc = Deserialize<Document>(bson); + + Assert.IsNotNull(doc); + var dict = doc["Dict"] as Document; + Assert.IsNotNull(dict); + Assert.AreEqual(1, dict.Count); + Assert.AreEqual(9, dict[Convert.ToString((int)DateTimeKind.Utc)]); + } + + [Test] + public void CanDeserializeADictionaryWithEnumAsKey() + { + var bson = Serialize(new Document("Dict", new Document(( (int)DateTimeKind.Utc ).ToString(), 9))); + var prop =
Deserialize<DictionaryWithEnumAsKeyHelper>(bson); + + Assert.IsNotNull(prop); + Assert.IsNotNull(prop.Dict); + Assert.AreEqual(1,prop.Dict.Count); + Assert.AreEqual(9,prop.Dict[DateTimeKind.Utc]); + } + + public class NullDictionaryPropertyHelper + { + public Dictionary Dict { get; set; } + } + + [Test] + public void CanDeserializeAndNullDictionaryProperty() + { + var bson = Serialize(new Document("Dict", null)); + var prop = Deserialize<NullDictionaryPropertyHelper>(bson); + + Assert.IsNotNull(prop); + Assert.IsNull(prop.Dict); + } + + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/Serialization/SerializationTestBase.cs b/source/MongoDB.Tests/UnitTests/Serialization/SerializationTestBase.cs new file mode 100644 index 00000000..e61382bd --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/Serialization/SerializationTestBase.cs @@ -0,0 +1,49 @@ +using System; +using System.IO; +using MongoDB.Bson; +using MongoDB.Configuration.Mapping; +using MongoDB.Serialization; + +namespace MongoDB.UnitTests.Serialization +{ + public abstract class SerializationTestBase + { + public const string EmptyDocumentBson = "BQAAAAA="; + + protected virtual IMappingStore MappingStore + { + get { return new AutoMappingStore(); } + } + + protected T Deserialize<T>(string base64) + { + using (var mem = new MemoryStream(Convert.FromBase64String(base64))) + { + var reader = new BsonReader(mem, new BsonClassMapBuilder(MappingStore, typeof(T))); + return (T)reader.ReadObject(); + } + } + + protected string Serialize<T>(object instance) + { + using (var mem = new MemoryStream()) + { + var writer = new BsonWriter(mem, new BsonClassMapDescriptor(MappingStore, typeof(T))); + writer.WriteObject(instance); + writer.Flush(); + return Convert.ToBase64String(mem.ToArray()); + } + } + + protected string Serialize(Document document) + { + using (var mem = new MemoryStream()) + { + var writer = new BsonWriter(mem, new BsonDocumentDescriptor()); + writer.WriteObject(document); + writer.Flush(); + return Convert.ToBase64String(mem.ToArray()); + } + } + } +} diff --git a/source/MongoDB.Tests/UnitTests/TestBinary.cs b/source/MongoDB.Tests/UnitTests/TestBinary.cs new file mode 100644 index 00000000..b6bc8235 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestBinary.cs @@ -0,0 +1,129 @@ +using System.IO; +using System.Runtime.Serialization.Formatters.Binary; +using System.Xml.Serialization; +using NUnit.Framework; +using System.Linq; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestBinary + { + [Test] + public void CanCreateBinary(){ + var binary = new Binary(); + Assert.IsNull(binary.Bytes); + Assert.AreEqual(BinarySubtype.Unknown, binary.Subtype); + } + + [Test] + public void CanCreateBinaryFromNull(){ + var binary = new Binary(null); + Assert.IsNull(binary.Bytes); + Assert.AreEqual(BinarySubtype.General, binary.Subtype); + } + + [Test] + public void CanCreateBinaryFromBytes(){ + var bytes = new byte[] { 10 }; + var binary = new Binary(bytes); + Assert.AreEqual(bytes,binary.Bytes); + Assert.AreEqual(BinarySubtype.General, binary.Subtype); + } + + [Test] + public void CanCreateBinaryFromBytesAndSubtype(){ + var bytes = new byte[] {10}; + var binary = new Binary(bytes,BinarySubtype.UserDefined); + Assert.AreEqual(bytes, binary.Bytes); + Assert.AreEqual(BinarySubtype.UserDefined, binary.Subtype); + } + + [Test] + public void CanImplicitConvertedToBytes(){ + var bytes = new byte[]{10,12}; + var binary = new Binary(bytes); + var converted = (byte[])binary; + Assert.IsNotNull(converted); + Assert.AreEqual(bytes, converted); + } + + [Test] + public void
CanImplicitConvertedFromBytes(){ + var bytes = new byte[] {10, 12}; + var binary = (Binary)bytes; + Assert.IsNotNull(binary); + Assert.AreEqual(bytes,binary.Bytes); + } + + [Test] + public void CanBeCloned(){ + var binarySource = new Binary(new byte[] {10, 20}, BinarySubtype.UserDefined); + var binaryDest = binarySource.Clone() as Binary; + Assert.IsNotNull(binaryDest); + Assert.AreEqual(binarySource.Bytes,binaryDest.Bytes); + Assert.AreEqual(binarySource.Subtype,binaryDest.Subtype); + } + + [Test] + public void CanBeEnumerated() + { + var binary = new Binary(new byte[] { 10, 20 }); + + var array = binary.ToArray(); + Assert.AreEqual(2,array.Length); + Assert.AreEqual(10, array[0]); + Assert.AreEqual(20, array[1]); + } + + [Test] + public void CanBeBinarySerialized() + { + var source = new Binary(new byte[] {10, 20}, BinarySubtype.Md5); + var formatter = new BinaryFormatter(); + + var mem = new MemoryStream(); + formatter.Serialize(mem, source); + mem.Position = 0; + + var dest = (Binary)formatter.Deserialize(mem); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeEqual() + { + var binary1 = new Binary(new byte[] { 10, 20 }, BinarySubtype.Md5); + var binary2 = new Binary(new byte[] { 10, 20 }, BinarySubtype.Md5); + + Assert.AreEqual(binary1,binary2); + } + + [Test] + public void CanBeXmlSerialized() + { + var source = new Binary(new byte[] { 10, 20 }, BinarySubtype.Md5); + var serializer = new XmlSerializer(typeof(Binary)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (Binary)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeXmlSerializedWhenNullBytes() + { + var source = new Binary(null, BinarySubtype.Md5); + var serializer = new XmlSerializer(typeof(Binary)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (Binary)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + } +} diff --git a/source/MongoDB.Tests/UnitTests/TestCode.cs b/source/MongoDB.Tests/UnitTests/TestCode.cs new file mode 100644 index 00000000..24b43b0b --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestCode.cs @@ -0,0 +1,77 @@ +using System.IO; +using System.Runtime.Serialization.Formatters.Binary; +using System.Xml.Serialization; +using NUnit.Framework; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestCode + { + [Test] + public void CanBeConstructedWithValue() + { + var code = new Code("code"); + + Assert.AreEqual("code",code.Value); + } + + [Test] + public void CanBeConstructoredWithNull() + { + var code = new Code(null); + + Assert.IsNull(code.Value); + } + + [Test] + public void CanBeEqual() + { + var code1 = new Code("code"); + var code2 = new Code("code"); + + Assert.AreEqual(code1,code2); + } + + [Test] + public void CanBeBinarySerialized() + { + var source = new Code("code"); + var formatter = new BinaryFormatter(); + + var mem = new MemoryStream(); + formatter.Serialize(mem, source); + mem.Position = 0; + + var dest = (Code)formatter.Deserialize(mem); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeXmlSerialized() + { + var source = new Code("code"); + var serializer = new XmlSerializer(typeof(Code)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (Code)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + + [Test] + public void 
CanBeXmlSerializedWithNullValue() + { + var source = new Code(null); + var serializer = new XmlSerializer(typeof(Code)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (Code)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/TestCodeWScope.cs b/source/MongoDB.Tests/UnitTests/TestCodeWScope.cs new file mode 100644 index 00000000..62fd50c2 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestCodeWScope.cs @@ -0,0 +1,77 @@ +using System.IO; +using System.Runtime.Serialization.Formatters.Binary; +using System.Xml.Serialization; +using NUnit.Framework; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestCodeWScope + { + [Test] + public void CanBeBinarySerialized() + { + var source = new CodeWScope("code", new Document("key", "value")); + var formatter = new BinaryFormatter(); + + var mem = new MemoryStream(); + formatter.Serialize(mem, source); + mem.Position = 0; + + var dest = (CodeWScope)formatter.Deserialize(mem); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeConstructedWithValue() + { + var code = new CodeWScope("code"); + + Assert.AreEqual("code", code.Value); + } + + [Test] + public void CanBeConstructoredWithNull() + { + var code = new CodeWScope(null); + + Assert.IsNull(code.Value); + } + + [Test] + public void CanBeEqual() + { + var code1 = new CodeWScope("code", new Document("key", "value")); + var code2 = new CodeWScope("code", new Document("key", "value")); + + Assert.AreEqual(code1, code2); + } + + [Test] + public void CanBeXmlSerialized() + { + var source = new CodeWScope("code",new Document("key","value")); + var serializer = new XmlSerializer(typeof(CodeWScope)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (CodeWScope)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeXmlSerializedWithNullValue() + { + var source = new CodeWScope(null,null); + var serializer = new XmlSerializer(typeof(CodeWScope)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (CodeWScope)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/TestConnectionStringBuilder.cs b/source/MongoDB.Tests/UnitTests/TestConnectionStringBuilder.cs new file mode 100644 index 00000000..4b326241 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestConnectionStringBuilder.cs @@ -0,0 +1,147 @@ +using System; +using System.Collections.Generic; +using NUnit.Framework; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestConnectionStringBuilder + { + [Test] + public void TestConnectionStringParsing(){ + var builder = + new MongoConnectionStringBuilder( + "Username=testuser;Password=testpassword;Server=testserver:555;ConnectionLifetime=50;MaximumPoolSize=101;MinimumPoolSize=202;Pooled=false;Database=testdatabase"); + Assert.AreEqual("testuser", builder.Username); + Assert.AreEqual("testpassword", builder.Password); + Assert.AreEqual("testdatabase", builder.Database); + Assert.AreEqual(101, builder.MaximumPoolSize); + Assert.AreEqual(202, builder.MinimumPoolSize); + Assert.AreEqual(TimeSpan.FromSeconds(50), builder.ConnectionLifetime); + Assert.AreEqual(false, builder.Pooled); + + var servers = new 
List(builder.Servers); + Assert.AreEqual(1, servers.Count); + Assert.AreEqual("testserver", servers[0].Host); + Assert.AreEqual(555, servers[0].Port); + } + + [Test] + public void TestConnectionStringParsingServerWithoutPort(){ + var builder = new MongoConnectionStringBuilder("Username=testuser;Password=testpassword;Server=testserver"); + Assert.AreEqual("testuser", builder.Username); + Assert.AreEqual("testpassword", builder.Password); + + var servers = new List(builder.Servers); + Assert.AreEqual(1, servers.Count); + Assert.AreEqual("testserver", servers[0].Host); + Assert.AreEqual(MongoServerEndPoint.DefaultPort, servers[0].Port); + } + + [Test] + public void TestCreateEmptyInstance(){ + new MongoConnectionStringBuilder(); + } + + [Test] + public void TestDefaults(){ + var builder = new MongoConnectionStringBuilder(); + Assert.IsNull(builder.Username); + Assert.IsNull(builder.Password); + Assert.AreEqual(builder.MaximumPoolSize, MongoConnectionStringBuilder.DefaultMaximumPoolSize); + Assert.AreEqual(builder.MinimumPoolSize, MongoConnectionStringBuilder.DefaultMinimumPoolSize); + Assert.AreEqual(builder.ConnectionLifetime, MongoConnectionStringBuilder.DefaultConnectionLifeTime); + Assert.AreEqual(builder.ConnectionTimeout, MongoConnectionStringBuilder.DefaultConnectionTimeout); + Assert.AreEqual(builder.Database, MongoConnectionStringBuilder.DefaultDatabase); + Assert.AreEqual(builder.Pooled, MongoConnectionStringBuilder.DefaultPooled); + + var servers = new List(builder.Servers); + Assert.AreEqual(1, servers.Count); + Assert.AreEqual(MongoServerEndPoint.DefaultPort, servers[0].Port); + Assert.AreEqual(MongoServerEndPoint.DefaultHost, servers[0].Host); + } + + [Test] + public void TestSimpleUriString(){ + var builder = new MongoConnectionStringBuilder("mongodb://server"); + Assert.AreEqual(1, builder.Servers.Length); + Assert.AreEqual(MongoConnectionStringBuilder.DefaultDatabase, builder.Database); + Assert.AreEqual("server", builder.Servers[0].Host); + Assert.AreEqual(MongoServerEndPoint.DefaultPort, builder.Servers[0].Port); + } + + [Test] + public void TestSimpleUriStringWithDatabase(){ + var builder = new MongoConnectionStringBuilder("mongodb://server/database"); + Assert.AreEqual("database", builder.Database); + Assert.AreEqual(1, builder.Servers.Length); + Assert.AreEqual("server", builder.Servers[0].Host); + Assert.AreEqual(MongoServerEndPoint.DefaultPort, builder.Servers[0].Port); + } + + [Test] + public void TestToStringOutput(){ + var builder = new MongoConnectionStringBuilder + { + Password = "testpassword", + Username = "testusername", + ConnectionLifetime = TimeSpan.FromSeconds(50), + MaximumPoolSize = 101, + MinimumPoolSize = 202, + ConnectionTimeout = TimeSpan.FromSeconds(60) + }; + builder.AddServer("testserver1", 555); + builder.AddServer("testserver2"); + + Assert.AreEqual( + "Username=testusername;Password=testpassword;Server=testserver1:555,testserver2;MaximumPoolSize=101;MinimumPoolSize=202;ConnectionTimeout=60;ConnectionLifetime=50", + builder.ToString()); + } + + [Test] + public void TestToStringOutputWithDefaultServerPort(){ + var builder = new MongoConnectionStringBuilder(); + builder.AddServer("testserver"); + Assert.AreEqual("Server=testserver", builder.ToString()); + } + + [Test] + public void TestToStringOutputWithoutUsernameAndPassword(){ + var builder = new MongoConnectionStringBuilder(); + builder.AddServer("testserver", 555); + + Assert.AreEqual("Server=testserver:555", builder.ToString()); + } + + [Test] + public void 
TestUriStringWithUsenameAndPasswort(){ + var builder = new MongoConnectionStringBuilder("mongodb://username:password@server"); + Assert.AreEqual("username", builder.Username); + Assert.AreEqual("password", builder.Password); + Assert.AreEqual(MongoConnectionStringBuilder.DefaultDatabase, builder.Database); + Assert.AreEqual(1, builder.Servers.Length); + Assert.AreEqual("server", builder.Servers[0].Host); + Assert.AreEqual(MongoServerEndPoint.DefaultPort, builder.Servers[0].Port); + } + + [Test] + public void TestUriWithMultipleServers(){ + var builder = new MongoConnectionStringBuilder("mongodb://server1,server2:1234,server3/database"); + Assert.AreEqual("database", builder.Database); + Assert.AreEqual(3, builder.Servers.Length); + Assert.AreEqual("server1", builder.Servers[0].Host); + Assert.AreEqual(MongoServerEndPoint.DefaultPort, builder.Servers[0].Port); + Assert.AreEqual("server2", builder.Servers[1].Host); + Assert.AreEqual(1234, builder.Servers[1].Port); + Assert.AreEqual("server3", builder.Servers[2].Host); + Assert.AreEqual(MongoServerEndPoint.DefaultPort, builder.Servers[2].Port); + } + + [Test] + public void TestUriWithPropertys(){ + var builder = new MongoConnectionStringBuilder("mongodb://server1/database?pooled=false&connectionlifetime=10"); + Assert.AreEqual(false, builder.Pooled); + Assert.AreEqual(10.0, builder.ConnectionLifetime.TotalSeconds); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/TestDBRef.cs b/source/MongoDB.Tests/UnitTests/TestDBRef.cs new file mode 100644 index 00000000..ac679b22 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestDBRef.cs @@ -0,0 +1,108 @@ +using System; +using System.IO; +using System.Runtime.Serialization.Formatters.Binary; +using System.Xml.Serialization; +using MongoDB.Util; +using NUnit.Framework; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestDBRef + { + [Test] + public void CanBeBinarySerialized() + { + var source = new DBRef("collection", "id"); + var formatter = new BinaryFormatter(); + + var mem = new MemoryStream(); + formatter.Serialize(mem, source); + mem.Position = 0; + + var dest = (DBRef)formatter.Deserialize(mem); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeXmlSerialized() + { + var source = new DBRef("collection", "id"); + var serializer = new XmlSerializer(typeof(DBRef)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (DBRef)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + + [Test] + public void TestCastsToDocument() + { + var ogen = new OidGenerator(); + var dref = new DBRef("tests.dbrefs", ogen.Generate()); + var doc = (Document)dref; + Assert.AreEqual(dref.CollectionName, doc[DBRef.RefName]); + } + + [Test] + public void TestEqualsAreSameObject() + { + var r = new DBRef("tests", "2312314"); + Assert.AreEqual(r, r); + } + + [Test] + public void TestEqualsUsingSameValues() + { + const string colname = "tests"; + const string id = "32312312"; + var r = new DBRef(colname, id); + var r2 = new DBRef(colname, id); + + Assert.AreEqual(r, r2); + } + + [Test] + public void TestFromDocument() + { + const string colname = "tests"; + const string id = "32312312"; + var doc = new Document().Add(DBRef.RefName, colname).Add(DBRef.IdName, id); + + var expected = new DBRef(colname, id); + Assert.AreEqual(expected, DBRef.FromDocument(doc)); + } + + [Test] + [ExpectedException(typeof(ArgumentException))] + public void 
TestFromIncompleteDocumentThrowsArguementException() + { + DBRef.FromDocument(new Document(DBRef.RefName, "tests")); + } + + [Test] + public void TestIsDocumentDBRef() + { + var doc = new Document(); + + Assert.IsFalse(DBRef.IsDocumentDBRef(null)); + Assert.IsFalse(DBRef.IsDocumentDBRef(doc)); + + doc[DBRef.RefName] = "tests"; + Assert.IsFalse(DBRef.IsDocumentDBRef(doc)); + + doc.Remove(DBRef.RefName); + doc[DBRef.IdName] = "12312131"; + Assert.IsFalse(DBRef.IsDocumentDBRef(doc)); + + doc[DBRef.RefName] = "tests"; + Assert.IsTrue(DBRef.IsDocumentDBRef(doc)); + + doc[DBRef.MetaName] = new Document(); + Assert.IsTrue(DBRef.IsDocumentDBRef(doc)); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/TestDocument.cs b/source/MongoDB.Tests/UnitTests/TestDocument.cs new file mode 100644 index 00000000..34e53622 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestDocument.cs @@ -0,0 +1,316 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.Serialization.Formatters.Binary; +using System.Xml.Serialization; +using NUnit.Framework; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestDocument + { + private static void AreEqual(Document d1, Document d2) + { + if(!d1.Equals(d2)) + Assert.Fail(string.Format("Documents don't match\r\nExpected: {0}\r\nActual: {1}", d1, d2)); + } + + private static void AreNotEqual(Document d1, Document d2) + { + if(d1.Equals(d2)) + Assert.Fail(string.Format("Documents match\r\nExpected: not {0}\r\nActual: {1}", d1, d2)); + } + + private class ReverseComparer : IComparer + { + public int Compare(string x, string y) + { + return y.CompareTo(x); + } + } + + [Test] + public void TestClearRemovesAll() + { + var d = new Document(); + d["one"] = 1; + d.Add("two", 2); + d["three"] = 3; + Assert.AreEqual(3, d.Count); + d.Clear(); + Assert.AreEqual(0, d.Count); + Assert.IsNull(d["one"]); + Assert.IsFalse(d.ContainsKey("one")); + } + + [Test] + public void TestCopyToCopiesAndOverwritesKeys() + { + var d = new Document(); + var dest = new Document(); + dest["two"] = 200; + d["one"] = 1; + d.Add("two", 2); + d["three"] = 3; + d.CopyTo(dest); + Assert.AreEqual(2, dest["two"]); + } + + [Test] + public void TestCopyToCopiesAndPreservesKeyOrderToEmptyDoc() + { + var d = new Document(); + var dest = new Document(); + d["one"] = 1; + d.Add("two", 2); + d["three"] = 3; + d.CopyTo(dest); + var cnt = 1; + foreach(var key in dest.Keys) + { + Assert.AreEqual(cnt, d[key]); + cnt++; + } + } + + [Test] + public void TestDocumentCanCreatedFromDictionary() + { + var dictionary = new Dictionary {{"value1", "test"}, {"value2", 10}}; + var document = new Document(dictionary); + Assert.AreEqual(2, document.Count); + Assert.AreEqual("test", document["value1"]); + Assert.AreEqual(10, document["value2"]); + } + + [Test] + public void TestDocumentIsSerializable() + { + var src = new Document().Add("test", 2); + using(var mem = new MemoryStream()) + { + var formatter = new BinaryFormatter(); + formatter.Serialize(mem, src); + mem.Position = 0; + var dest = (Document)formatter.Deserialize(mem); + AreEqual(src, dest); + } + } + + [Test] + public void TestIdReturnsNullIfNotSet() + { + var document = new Document(); + Assert.IsNull(document.Id); + } + + [Test] + public void TestIdSets_IdField() + { + var document = new Document {Id = 10}; + Assert.AreEqual(10, document.Id); + } + + [Test] + public void TestInsertMaintainsKeyOrder() + { + var d = new Document(); + d["one"] = 1; + d.Insert("zero", 0, 0); + 
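+ // Document preserves the insertion order of its keys, so inserting "zero" at index 0 should make it the first key; the loop below only inspects the first enumerated key before breaking.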
+ var keysList = d.Keys as IEnumerable; + foreach(var k in d.Keys) + { + Assert.AreEqual("zero", k); + break; + } + } + + [Test] + [ExpectedException(ExceptionType = typeof(ArgumentException), + MatchType = MessageMatch.Contains)] + public void TestInsertWillThrowArgumentExceptionIfKeyAlreadyExists() + { + var d = new Document(); + d["one"] = 1; + d.Insert("one", 1, 0); + } + + [Test] + public void TestKeyOrderIsPreserved() + { + var d = new Document(); + d["one"] = 1; + d.Add("two", 2); + d["three"] = 3; + var cnt = 1; + foreach(var key in d.Keys) + { + Assert.AreEqual(cnt, d[key]); + cnt++; + } + } + + [Test] + public void TestKeyOrderPreservedOnRemove() + { + var d = new Document(); + d["one"] = 1; + d["onepointfive"] = 1.5; + d.Add("two", 2); + d.Add("two.5", 2.5); + d.Remove("two.5"); + d["three"] = 3; + d.Remove("onepointfive"); + var cnt = 1; + foreach(var key in d.Keys) + { + Assert.AreEqual(cnt, d[key]); + cnt++; + } + } + + [Test] + public void TestMaintainsOrderUsingMultipleMethods() + { + var d = new Document(new ReverseComparer()); + d["one"] = 1; + var test = d["one"]; + d["zero"] = 0; + + var keysList = d.Keys as IEnumerable; + Assert.AreEqual(keysList.First(), "zero"); + } + + [Test] + public void TestRemove() + { + var d = new Document(); + d["one"] = 1; + d.Remove("one"); + Assert.IsFalse(d.ContainsKey("one")); + } + + [Test] + public void TestSetNullValue() + { + var document = new Document(); + document.Add("value", null); + Assert.AreEqual(1, document.Count); + Assert.IsNull(document["value"]); + } + + [Test] + public void TestTwoDocumentsWithDifferentDocumentChildTreeAreNotEqual() + { + var d1 = new Document().Add("k1", new Document().Add("k2", new Document().Add("k3", "foo"))); + var d2 = new Document().Add("k1", new Document().Add("k2", new Document().Add("k3", "bar"))); + AreNotEqual(d1, d2); + } + + [Test] + public void TestTwoDocumentsWithMisorderedArrayContentAreNotEqual() + { + var d1 = new Document().Add("k1", new[] {"v1", "v2"}); + var d2 = new Document().Add("k1", new[] {"v2", "v1"}); + AreNotEqual(d1, d2); + } + + [Test] + public void TestTwoDocumentsWithSameArrayContentAreEqual() + { + var d1 = new Document().Add("k1", new[] {"v1", "v2"}); + var d2 = new Document().Add("k1", new[] {"v1", "v2"}); + AreEqual(d1, d2); + } + + [Test] + public void TestTwoDocumentsWithSameContentInDifferentOrderAreNotEqual() + { + var d1 = new Document().Add("k1", "v1").Add("k2", "v2"); + var d2 = new Document().Add("k2", "v2").Add("k1", "v1"); + AreNotEqual(d1, d2); + } + + [Test] + public void TestTwoDocumentsWithSameContentInSameOrderAreEqual() + { + var d1 = new Document().Add("k1", "v1").Add("k2", "v2"); + var d2 = new Document().Add("k1", "v1").Add("k2", "v2"); + AreEqual(d1, d2); + } + + [Test] + public void TestTwoDocumentsWithSameDocumentChildTreeAreEqual() + { + var d1 = new Document().Add("k1", new Document().Add("k2", new Document().Add("k3", "foo"))); + var d2 = new Document().Add("k1", new Document().Add("k2", new Document().Add("k3", "foo"))); + AreEqual(d1, d2); + } + + [Test] + public void TestUseOfIComparerForKeys() + { + var doc = new Document(new ReverseComparer()); + + doc.Append("a", 3); + doc.Append("b", 2); + doc.Append("c", 1); + + Assert.AreEqual("c", doc.Keys.First()); + } + + [Test] + public void TestValues() + { + var d = new Document(); + d["one"] = 1; + d.Add("two", 2); + d["three"] = 3; + var vals = d.Values; + Assert.AreEqual(3, vals.Count); + } + + [Test] + public void TestValuesAdded() + { + var d = new Document(); + d["test"] = 1; + 
Assert.AreEqual(1, d["test"]); + } + + [Test] + public void CanBeBinarySerialized() + { + var source = new Document("key1", "value1").Add("key2", 10); + var formatter = new BinaryFormatter(); + + var mem = new MemoryStream(); + formatter.Serialize(mem, source); + mem.Position = 0; + + var dest = (Document)formatter.Deserialize(mem); + + Assert.AreEqual(2,dest.Count); + Assert.AreEqual(source["key1"], dest["key1"]); + Assert.AreEqual(source["key2"], dest["key2"]); + } + + [Test] + public void CanBeXmlSerialized() + { + var source = new Document("key1", "value1").Add("key2", new Document("key", "value").Add("key2", null)); + var serializer = new XmlSerializer(typeof(Document)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (Document)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(2, dest.Count); + Assert.AreEqual(source["key1"], dest["key1"]); + Assert.AreEqual(source["key2"], dest["key2"]); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/TestMo.cs b/source/MongoDB.Tests/UnitTests/TestMo.cs new file mode 100644 index 00000000..7f31ced0 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestMo.cs @@ -0,0 +1,141 @@ +using NUnit.Framework; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestMo + { + [Test] + public void TestInc(){ + var mo = Mo.Inc("A", 10); + + var expected = new Document("$inc", new Document("A", 10)); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestSet() + { + var mo = Mo.Set("A", 10); + + var expected = new Document("$set", new Document("A", 10)); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestUnset() + { + var mo = Mo.Unset("A"); + + var expected = new Document("$unset", new Document("A", 1)); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestUnsetArray(){ + var array = new[] {"A", "B"}; + var mo = Mo.Unset(array); + + var expected = new Document("$unset", new Document("A",1).Add("B",1)); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestPush() + { + var mo = Mo.Push("A",1); + + var expected = new Document("$push", new Document("A", 1)); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestPushAll() + { + var array = new object[] {1, "C"}; + var mo = Mo.PushAll("A", array); + + var expected = new Document("$pushAll", new Document("A", array)); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestAddToSet() + { + var mo = Mo.AddToSet("A", 1); + + var expected = new Document("$addToSet", new Document("A", 1)); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestAddToSetArray() + { + var array = new object[] { 1, "C" }; + var mo = Mo.AddToSet("A", array); + + var expected = new Document("$addToSet", new Document("A", new Document("$each", array))); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestPopFirst() + { + var mo = Mo.PopFirst("A"); + + var expected = new Document("$pop", new Document("A", -1)); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestPopLast() + { + var mo = Mo.PopLast("A"); + + var expected = new Document("$pop", new Document("A", 1)); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestPull() + { + var mo = Mo.Pull("A",1); + + var expected = new Document("$pull", new Document("A", 1)); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestPullAll() + { + var array = new object[] { 1, "C" }; + var mo = Mo.PullAll("A", array); + + var expected = 
new Document("$pullAll", new Document("A", array)); + + Assert.AreEqual(expected, mo); + } + + [Test] + public void TestMultipleModifiers() + { + var mo = Mo.Inc("A", 1) & Mo.Inc("B", 2) & Mo.Set("C",3); + + var expected = new Document("$inc", new Document("A", 1).Add("B", 2)) + .Add("$set",new Document("C",3)); + + Assert.AreEqual(expected, mo); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/TestMongoRegex.cs b/source/MongoDB.Tests/UnitTests/TestMongoRegex.cs new file mode 100644 index 00000000..8e763bd0 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestMongoRegex.cs @@ -0,0 +1,142 @@ +using System.IO; +using System.Runtime.Serialization.Formatters.Binary; +using System.Text.RegularExpressions; +using System.Xml.Serialization; +using NUnit.Framework; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestMongoRegex + { + [Test] + public void CanBeCunstructedFromNullExpressionAndOptions() + { + var regex = new MongoRegex(null, null); + Assert.IsNull(regex.Expression); + Assert.IsNull(regex.RawOptions); + } + + [Test] + public void CanBeConstructed() + { + var regex = new MongoRegex("expression"); + Assert.AreEqual("expression", regex.Expression); + Assert.AreEqual(string.Empty, regex.RawOptions); + } + + [Test] + public void CanBeConstructedWithOption() + { + var regex = new MongoRegex("expression", "options"); + Assert.AreEqual("expression",regex.Expression); + Assert.AreEqual("options",regex.RawOptions); + } + + [Test] + public void CanBeConstructedFromRegex() + { + const RegexOptions options = RegexOptions.IgnoreCase | + RegexOptions.IgnorePatternWhitespace | + RegexOptions.Multiline; + + var regex = new MongoRegex(new Regex("expression", options)); + Assert.AreEqual("expression", regex.Expression); + Assert.AreEqual("img", regex.RawOptions); + } + + [Test] + public void MongoRegexOptionFlagsAreIndependent() + { + var regex = new MongoRegex("expression", MongoRegexOption.IgnoreCase); + Assert.AreEqual("expression", regex.Expression); + Assert.AreEqual("i", regex.RawOptions); + } + + [Test] + public void CanBeConstructedWithMongoRegexOption() + { + var regex = new MongoRegex("expression", MongoRegexOption.IgnoreCase | MongoRegexOption.IgnorePatternWhitespace | MongoRegexOption.Multiline); + Assert.AreEqual("expression", regex.Expression); + Assert.AreEqual("img", regex.RawOptions); + } + + [Test] + public void CanBeConstructedWithRegexOptions() + { + var regex = new MongoRegex("expression", RegexOptions.IgnoreCase | RegexOptions.Multiline); + Assert.AreEqual("expression", regex.Expression); + Assert.AreEqual("im", regex.RawOptions); + } + + [Test] + public void CanReadOptions() + { + var regex = new MongoRegex("expression", "img"); + Assert.AreEqual(MongoRegexOption.IgnoreCase | MongoRegexOption.IgnorePatternWhitespace | MongoRegexOption.Multiline, regex.Options); + } + + [Test] + public void CanSetOptions() + { + var regex = new MongoRegex("expression", null) + { + Options = MongoRegexOption.IgnoreCase | MongoRegexOption.IgnorePatternWhitespace + }; + + Assert.AreEqual("ig",regex.RawOptions); + } + + [Test] + public void CanBeBinarySerialized() + { + var source = new MongoRegex("exp", "opt"); + var formatter = new BinaryFormatter(); + + var mem = new MemoryStream(); + formatter.Serialize(mem, source); + mem.Position = 0; + + var dest = (MongoRegex)formatter.Deserialize(mem); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeXmlSerialized() + { + var source = new MongoRegex("exp", "opt"); + var serializer = new 
XmlSerializer(typeof(MongoRegex)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (MongoRegex)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeXmlSerializedWhenNullPropertys() + { + var source = new MongoRegex(null, null); + var serializer = new XmlSerializer(typeof(MongoRegex)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (MongoRegex)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBuildNetRegex() + { + var regex = new MongoRegex("expression", MongoRegexOption.IgnoreCase|MongoRegexOption.IgnorePatternWhitespace|MongoRegexOption.Multiline); + var netRegex = regex.BuildRegex(); + + Assert.IsNotNull(netRegex); + Assert.AreEqual("expression",netRegex.ToString()); + Assert.AreEqual(RegexOptions.IgnorePatternWhitespace | RegexOptions.IgnoreCase | RegexOptions.Multiline, netRegex.Options); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/TestMongoServerEndPoint.cs b/source/MongoDB.Tests/UnitTests/TestMongoServerEndPoint.cs new file mode 100644 index 00000000..eebbabaa --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestMongoServerEndPoint.cs @@ -0,0 +1,39 @@ +using System.IO; +using System.Runtime.Serialization.Formatters.Binary; +using System.Xml.Serialization; +using NUnit.Framework; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestMongoServerEndPoint + { + [Test] + public void CanBeBinarySerialized() + { + var source = new MongoServerEndPoint("myserver", 12345); + var formatter = new BinaryFormatter(); + + var mem = new MemoryStream(); + formatter.Serialize(mem, source); + mem.Position = 0; + + var dest = (MongoServerEndPoint)formatter.Deserialize(mem); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeXmlSerialized() + { + var source = new MongoServerEndPoint("myserver", 12345); + var serializer = new XmlSerializer(typeof(MongoServerEndPoint)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (MongoServerEndPoint)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/TestMongoSymbol.cs b/source/MongoDB.Tests/UnitTests/TestMongoSymbol.cs new file mode 100644 index 00000000..ebee4473 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestMongoSymbol.cs @@ -0,0 +1,164 @@ +using System.IO; +using System.Runtime.Serialization.Formatters.Binary; +using System.Text; +using System.Xml.Serialization; +using NUnit.Framework; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestMongoSymbol + { + [Test] + public void TestComparing(){ + var a = new MongoSymbol("a"); + var a2 = new MongoSymbol("a"); + var b = new MongoSymbol("b"); + + Assert.AreEqual(0, a.CompareTo(a2)); + Assert.AreEqual(0, a2.CompareTo(a2)); + + Assert.AreEqual(-1, a.CompareTo(b)); + Assert.AreEqual(1, b.CompareTo(a)); + } + + [Test] + public void TestEmpty(){ + var empty = MongoSymbol.Empty; + Assert.IsTrue(MongoSymbol.IsEmpty(empty)); + Assert.IsFalse(MongoSymbol.IsEmpty(new MongoSymbol("a"))); + } + + [Test] + public void TestEqOperator(){ + var a = new MongoSymbol("a"); + var a2 = new MongoSymbol("a"); + const string astr = "a"; + + var b = new MongoSymbol("b"); + const string bstr = "b"; + + Assert.IsTrue(a == a); + Assert.IsTrue(a == 
a2); + Assert.IsTrue(a2 == a); + Assert.IsTrue(a == astr); + Assert.IsTrue(astr == a); + + Assert.IsTrue(a == new StringBuilder().Append('a').ToString()); //Not interned like the hard coded ones above. + + Assert.IsFalse(a == b); + Assert.IsFalse(a == bstr); + Assert.IsFalse(bstr == a); + + Assert.IsFalse(a == null); + } + + [Test] + public void TestEquals(){ + var a = new MongoSymbol("a"); + var a2 = new MongoSymbol("a"); + const string astr = "a"; + + var b = new MongoSymbol("b"); + const string bstr = "b"; + + Assert.IsTrue(a.Equals(a2)); + Assert.IsTrue(a2.Equals(a)); + Assert.IsTrue(a.Equals(astr)); + + Assert.IsFalse(a.Equals(b)); + Assert.IsFalse(b.Equals(a)); + Assert.IsFalse(a.Equals(bstr)); + } + + [Test] + public void TestNotEqOperator(){ + var a = new MongoSymbol("a"); + var a2 = new MongoSymbol("a"); + const string astr = "a"; + + var b = new MongoSymbol("b"); + const string bstr = "b"; + + Assert.IsFalse(a != a); + Assert.IsFalse(a != a2); + Assert.IsFalse(a2 != a); + Assert.IsFalse(a != astr); + Assert.IsFalse(astr != a); + + Assert.IsTrue(a != b); + Assert.IsTrue(a != bstr); + Assert.IsTrue(bstr != a); + + Assert.IsTrue(a != null); + } + + [Test] + public void TestToString(){ + const string val = "symbol"; + var sym = new MongoSymbol(val); + + string str = sym; + Assert.AreEqual(val, str); + Assert.IsTrue(str == sym); + Assert.AreEqual(str, sym.ToString()); + } + + [Test] + public void TestValue(){ + Assert.IsTrue(string.IsInterned("s") != null); + + var s = new MongoSymbol("s"); + Assert.IsNotNull(s.Value); + Assert.IsTrue(string.IsInterned(s.Value) != null, "First value was not interned"); + + var val = new StringBuilder().Append('s').ToString(); + Assert.IsFalse(string.IsInterned(val) == null); + var s2 = new MongoSymbol(val); + Assert.IsTrue(string.IsInterned(s2.Value) != null, "Second value was not interned"); + + Assert.IsTrue(ReferenceEquals(s.Value, s2.Value)); + } + + [Test] + public void CanBeBinarySerialized() + { + var source = new MongoSymbol("value"); + var formatter = new BinaryFormatter(); + + var mem = new MemoryStream(); + formatter.Serialize(mem, source); + mem.Position = 0; + + var dest = (MongoSymbol)formatter.Deserialize(mem); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeXmlSerialized() + { + var source = new MongoSymbol("value"); + var serializer = new XmlSerializer(typeof(MongoSymbol)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (MongoSymbol)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeXmlSerializedWhenValueNull() + { + var source = new MongoSymbol(null); + var serializer = new XmlSerializer(typeof(MongoSymbol)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (MongoSymbol)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + } +} \ No newline at end of file diff --git a/source/MongoDB.Tests/UnitTests/TestOid.cs b/source/MongoDB.Tests/UnitTests/TestOid.cs new file mode 100644 index 00000000..768068a9 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestOid.cs @@ -0,0 +1,260 @@ +using System; +using System.IO; +using System.Runtime.Serialization.Formatters.Binary; +using System.Xml.Serialization; +using NUnit.Framework; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestOid + { + [Test] + public void CanBeBinarySerialized() + { + var source = Oid.NewOid(); + var formatter = new 
BinaryFormatter(); + + var mem = new MemoryStream(); + formatter.Serialize(mem, source); + mem.Position = 0; + + var dest = (Oid)formatter.Deserialize(mem); + + Assert.AreEqual(source, dest); + } + + [Test] + public void CanBeXmlSerialized() + { + var source = Oid.NewOid(); + var serializer = new XmlSerializer(typeof(Oid)); + + var writer = new StringWriter(); + serializer.Serialize(writer, source); + var dest = (Oid)serializer.Deserialize(new StringReader(writer.ToString())); + + Assert.AreEqual(source, dest); + } + + [Test] + public void TestCtor() + { + var thrown = false; + try + { + new Oid("4a7067c30a57000000008ecb"); + } + catch(ArgumentException) + { + thrown = true; + } + Assert.IsFalse(thrown, "ID should be fine."); + } + + [Test] + public void TestDate() + { + const string hex = "4B458B95D114BE541B000000"; + var oid = new Oid(hex); + //Expected: 2010-01-07 02:24:56.633 + var expected = new DateTime(2010, 1, 7, 7, 21, 57, DateTimeKind.Utc); + Assert.AreEqual(expected, oid.Created); + } + + [Test] + public void TestDecode() + { + const string hex = "4a7067c30a57000000008ecb"; + var oid = new Oid(hex); + + Assert.AreEqual(hex, oid.ToString()); + } + + [Test] + public void TestEquals() + { + const string hex = "4a7067c30a57000000008ecb"; + Assert.AreEqual(new Oid(hex), new Oid(hex)); + } + + [Test] + public void TestFormatJ() + { + const string hex = "4a7067c30a57000000008ecb"; + var oid = new Oid(hex); + + Assert.AreEqual("\"" + hex + "\"", oid.ToString("J")); + } + + [Test] + public void TestIdCharacters() + { + var thrown = false; + try + { + new Oid("BADBOYc30a57000000008ecb"); + } + catch(Exception) + { + thrown = true; + } + Assert.IsTrue(thrown, "No invalid characters exception thrown"); + } + + [Test] + public void TestIdLength() + { + var thrown = false; + try + { + new Oid("BAD0"); + } + catch(Exception) + { + thrown = true; + } + Assert.IsTrue(thrown, "No length exception thrown"); + } + + [Test] + public void TestNewOidFromToString() + { + const string hex = "4B458B95D114BE541B000000"; + var firstOid = new Oid(hex); + var secondOid = new Oid(firstOid.ToString()); + + Assert.AreEqual(firstOid.ToString(), secondOid.ToString()); + } + + [Test] + public void TestNotEquals() + { + const string hex = "4a7067c30a57000000008ecb"; + const string hex2 = "4a7067c30a57000000008ecc"; + Assert.AreNotEqual(new Oid(hex), new Oid(hex2)); + } + + [Test] + [ExpectedException(typeof(ArgumentException))] + public void TestNullValue() + { + new Oid(String.Empty); + } + + [Test] + public void TestOidCanBeSerialized() + { + var serializer = new BinaryFormatter(); + + var oidSource = Oid.NewOid(); + Oid oidDesc; + using(var mem = new MemoryStream()) + { + serializer.Serialize(mem, oidSource); + mem.Position = 0; + oidDesc = (Oid)serializer.Deserialize(mem); + } + + Assert.AreEqual(oidSource, oidDesc); + } + + [Test] + public void TestOidComparisons() + { + var lower = new Oid("4a7067c30a57000000008ecb"); + var higher = new Oid("5a7067c30a57000000008ecb"); + + Assert.AreEqual(1, lower.CompareTo(null)); + Assert.AreEqual(1, higher.CompareTo(lower)); + + Assert.IsTrue(lower < higher); + Assert.IsTrue(higher > lower); + } + + [Test] + public void TestOidEquality() + { + var val = new Oid("4a7067c30a57000000008ecb"); + var other = new Oid("4a7067c30a57000000008ecb"); + + Assert.IsTrue(val.Equals(other), "Equals(Oid) did not work"); + Assert.IsTrue(val == other, "== operator did not work"); + Assert.IsTrue(val == val, "Objects should be equal to itself."); + } + + [Test] + public void 
TestOidEqualityToNull() + { + var val = Oid.NewOid(); + Oid other = null; + Oid other2 = null; + Assert.AreNotEqual(val, other); + Assert.AreNotEqual(other, val); + Assert.IsTrue(other == other2); + } + + [Test] + public void TestOidFromBytes() + { + var bytes = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + var hex = "0102030405060708090a0b0c"; + + var bval = new Oid(bytes); + var sval = new Oid(hex); + Assert.AreEqual(bval, sval); + } + + [Test] + public void TestOidInEquality() + { + var val = new Oid("4a7067c30a57000000008ecb"); + var other = new Oid("5a7067c30a57000000008ecb"); + Oid nilo = null; + + Assert.IsFalse(val == null); + Assert.IsFalse(nilo == val); + Assert.IsFalse(val == nilo); + Assert.IsFalse(val == other); + Assert.IsFalse(val.Equals(other)); + Assert.IsTrue(val != null); + Assert.IsTrue(val != other); + } + + [Test] + public void TestQuoteCharacters() + { + var val = new Oid(@"""4a7067c30a57000000008ecb"""); + try + { + new Oid(val.ToString()); + } + catch(ArgumentException) + { + Assert.Fail("Creating an Oid from the json representation should not fail."); + } + } + + [Test] + public void TestToByteArray() + { + var bytes = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + + var bval = new Oid(bytes); + var bytes2 = bval.ToByteArray(); + + Assert.IsNotNull(bytes2); + Assert.AreEqual(12, bytes2.Length); + Assert.AreEqual(bytes, bytes2); + } + + [Test] + public void TestCopyConstructor() + { + var source = Oid.NewOid(); + var dest = new Oid(source); + + Assert.AreEqual(source,dest); + } + } +} \ No newline at end of file diff --git a/MongoDB.Net-Tests/TestOidGenerator.cs b/source/MongoDB.Tests/UnitTests/TestOidGenerator.cs similarity index 92% rename from MongoDB.Net-Tests/TestOidGenerator.cs rename to source/MongoDB.Tests/UnitTests/TestOidGenerator.cs index 338fbab0..747daa6e 100644 --- a/MongoDB.Net-Tests/TestOidGenerator.cs +++ b/source/MongoDB.Tests/UnitTests/TestOidGenerator.cs @@ -1,9 +1,8 @@ - - using System; +using MongoDB.Util; using NUnit.Framework; -namespace MongoDB.Driver +namespace MongoDB.UnitTests { [TestFixture] public class TestOidGenerator diff --git a/source/MongoDB.Tests/UnitTests/TestOp.cs b/source/MongoDB.Tests/UnitTests/TestOp.cs new file mode 100644 index 00000000..5bb4b684 --- /dev/null +++ b/source/MongoDB.Tests/UnitTests/TestOp.cs @@ -0,0 +1,150 @@ +using MongoDB.Bson; +using NUnit.Framework; + +namespace MongoDB.UnitTests +{ + [TestFixture] + public class TestOp + { + [Test] + public void ConjunctionsWith2Operators() + { + var op = Op.GreaterThan(10) & Op.LessThan(20); + + Assert.AreEqual(10, op["$gt"]); + Assert.AreEqual(20, op["$lt"]); + } + + [Test] + public void ConjunctionsWith3Operators() + { + var op = Op.GreaterThan(10) & Op.LessThan(20) & Op.Mod(3, 1); + + Assert.AreEqual(10, op["$gt"]); + Assert.AreEqual(20, op["$lt"]); + Assert.AreEqual(new [] { 3, 1 }, op["$mod"]); + } + + [Test] + public void Negation() + { + var op = !Op.GreaterThan(10); + + var negated = (Op)op["$not"]; + + Assert.AreEqual(10, negated["$gt"]); + } + + [Test] + public void GreaterThan() + { + var op = Op.GreaterThan(10); + + Assert.AreEqual(10, op["$gt"]); + } + + [Test] + public void GreaterThanOrEqual() + { + var op = Op.GreaterThanOrEqual(10); + + Assert.AreEqual(10, op["$gte"]); + } + + [Test] + public void LessThan() + { + var op = Op.LessThan(10); + + Assert.AreEqual(10, op["$lt"]); + } + + [Test] + public void LessThanOrEqual() + { + var op = Op.LessThanOrEqual(10); + + Assert.AreEqual(10, op["$lte"]); + } + + [Test] + public void NotEqual() + 
{ + var op = Op.NotEqual(10); + + Assert.AreEqual(10, op["$ne"]); + } + + [Test] + public void In() + { + var op = Op.In(10,11,12); + + Assert.AreEqual(new[] { 10, 11, 12 }, op["$in"]); + } + + [Test] + public void NotIn() + { + var op = Op.NotIn(10, 11, 12); + + Assert.AreEqual(new[] { 10, 11, 12 }, op["$nin"]); + } + + [Test] + public void All() + { + var op = Op.All(10, 11, 12); + + Assert.AreEqual(new[] { 10, 11, 12 }, op["$all"]); + } + + [Test] + public void Mod() + { + var op = Op.Mod(10, 1); + + Assert.AreEqual(new[] { 10, 1 }, op["$mod"]); + } + + [Test] + public void Size() + { + var op = Op.Size(10); + + Assert.AreEqual(10, op["$size"]); + } + + [Test] + public void Exists() + { + var op = Op.Exists(); + + Assert.AreEqual(true, op["$exists"]); + } + + [Test] + public void NotExists() + { + var op = Op.NotExists(); + + Assert.AreEqual(false, op["$exists"]); + } + + [Test] + public void Type() + { + var op = Op.Type(BsonType.Boolean); + + Assert.AreEqual((int)BsonType.Boolean, op["$type"]); + } + + [Test] + public void Where() + { + var op = Op.Where("return this.a == 3 || this.b == 4;"); + + Assert.AreEqual(new Code("return this.a == 3 || this.b == 4;"),op["$where"]); + } + } +} \ No newline at end of file diff --git a/MongoDB.Net-Tests/Util/TestJsonUtils.cs b/source/MongoDB.Tests/UnitTests/Util/TestJsonUtils.cs similarity index 84% rename from MongoDB.Net-Tests/Util/TestJsonUtils.cs rename to source/MongoDB.Tests/UnitTests/Util/TestJsonUtils.cs index b397cb2a..d035ed07 100644 --- a/MongoDB.Net-Tests/Util/TestJsonUtils.cs +++ b/source/MongoDB.Tests/UnitTests/Util/TestJsonUtils.cs @@ -1,13 +1,11 @@ - using System; using System.Globalization; using System.Threading; -using NUnit.Framework; - -namespace MongoDB.Driver.Util +using MongoDB.Util; +using NUnit.Framework; + +namespace MongoDB.UnitTests.Util { - - [TestFixture] public class TestJsonUtils { @@ -38,26 +36,26 @@ public void TestNonPrintableUnicode(){ } [Test] - public void TestSerializeDocWithSingleNullField() { - var doc = new Document().Append("foo", null); + public void TestSerializeDocWithSingleNullField() { + var doc = new Document().Add("foo", null); Assert.AreEqual(@"{ ""foo"": null }", JsonFormatter.Serialize(doc)); } [Test] - public void TestSerializeDocWithSingleTrueField() { - var doc = new Document().Append("foo", true); + public void TestSerializeDocWithSingleTrueField() { + var doc = new Document().Add("foo", true); Assert.AreEqual(@"{ ""foo"": true }", JsonFormatter.Serialize(doc)); } [Test] - public void TestSerializeDocWithSingleFalseField() { - var doc = new Document().Append("foo", false); + public void TestSerializeDocWithSingleFalseField() { + var doc = new Document().Add("foo", false); Assert.AreEqual(@"{ ""foo"": false }", JsonFormatter.Serialize(doc)); } [Test] - public void TestSerializeDocWithSingleStringField() { - var doc = new Document().Append("foo", "bar"); + public void TestSerializeDocWithSingleStringField() { + var doc = new Document().Add("foo", "bar"); Assert.AreEqual(@"{ ""foo"": ""bar"" }", JsonFormatter.Serialize(doc)); } @@ -73,14 +71,14 @@ public void TestSerializeDocWithHighUnicodeValues(){ } [Test] - public void TestSerializeDocWithSingleIntField() { - var doc = new Document().Append("foo", 10); + public void TestSerializeDocWithSingleIntField() { + var doc = new Document().Add("foo", 10); Assert.AreEqual(@"{ ""foo"": 10 }", JsonFormatter.Serialize(doc)); } [Test] - public void TestSerializeDocWithSingleDoubleField() { - var doc = new Document().Append("foo", 10.1); + public void 
TestSerializeDocWithSingleDoubleField() { + var doc = new Document().Add("foo", 10.1); Assert.AreEqual(@"{ ""foo"": 10.1 }", JsonFormatter.Serialize(doc)); } @@ -89,46 +87,46 @@ public void TestSerializeCultureInvariantNumbers() { var cultureBackup = Thread.CurrentThread.CurrentCulture; Thread.CurrentThread.CurrentCulture = new CultureInfo("de-DE"); - var doc = new Document().Append("foo", 10.1); + var doc = new Document().Add("foo", 10.1); Assert.AreEqual(@"{ ""foo"": 10.1 }", JsonFormatter.Serialize(doc)); Thread.CurrentThread.CurrentCulture = cultureBackup; } [Test] - public void TestSerializeDocWithSingleDateTimeField() { - var doc = new Document().Append("foo", DateTime.Parse("2009-10-10T07:00:00.0000000Z")); + public void TestSerializeDocWithSingleDateTimeField() { + var doc = new Document().Add("foo", DateTime.Parse("2009-10-10T07:00:00.0000000Z")); Assert.AreEqual(@"{ ""foo"": ""2009-10-10T07:00:00.0000000Z"" }", JsonFormatter.Serialize(doc)); } [Test] - public void TestSerializeDocWithSingleOidField() { - var doc = new Document().Append("foo", new Oid("4ac7ee91f693066f1c96b649")); + public void TestSerializeDocWithSingleOidField() { + var doc = new Document().Add("foo", new Oid("4ac7ee91f693066f1c96b649")); Assert.AreEqual(@"{ ""foo"": ""4ac7ee91f693066f1c96b649"" }", JsonFormatter.Serialize(doc)); } [Test] - public void TestSerializeDocWithMultipleFields() { - var doc = new Document().Append("foo", "bar").Append("baz", 42); + public void TestSerializeDocWithMultipleFields() { + var doc = new Document().Add("foo", "bar").Add("baz", 42); Assert.AreEqual(@"{ ""foo"": ""bar"", ""baz"": 42 }", JsonFormatter.Serialize(doc)); } [Test] - public void TestSerializeDocWithSubDocField() { - var doc = new Document().Append("foo", "bar").Append("baz", new Document().Append("a", 1)); + public void TestSerializeDocWithSubDocField() { + var doc = new Document().Add("foo", "bar").Add("baz", new Document().Add("a", 1)); Assert.AreEqual(@"{ ""foo"": ""bar"", ""baz"": { ""a"": 1 } }", JsonFormatter.Serialize(doc)); } [Test] - public void TestSerializeDocWithArrayOfInts() { - var doc = new Document().Append("foo", new[] {1,2,3,4}); + public void TestSerializeDocWithArrayOfInts() { + var doc = new Document().Add("foo", new[] { 1, 2, 3, 4 }); Assert.AreEqual(@"{ ""foo"": [ 1, 2, 3, 4 ] }", JsonFormatter.Serialize(doc)); } [Test] - public void TestSerializeDocWithArrayOfDocs() { - var doc = new Document().Append("foo", new[] { - new Document().Append("a", 1), - new Document().Append("b", 2), - new Document().Append("c", 3), + public void TestSerializeDocWithArrayOfDocs() { + var doc = new Document().Add("foo", new[] { + new Document().Add("a", 1), + new Document().Add("b", 2), + new Document().Add("c", 3), }); Assert.AreEqual(@"{ ""foo"": [ { ""a"": 1 }, { ""b"": 2 }, { ""c"": 3 } ] }", JsonFormatter.Serialize(doc)); } @@ -136,7 +134,7 @@ public void TestSerializeDocWithArrayOfDocs() { [Test] public void TestSerializeDocWithBinary(){ var doc = new Document(){{"b", new Binary(){Bytes = new byte[]{0,1,2,3,4}, - Subtype = Binary.TypeCode.General}}}; + Subtype = BinarySubtype.General}}}; Assert.AreEqual(@"{ ""b"": { ""$binary"": ""AAECAwQ="", ""$type"" : 2 } }", JsonFormatter.Serialize(doc)); } diff --git a/MongoDBDriver/AssemblyInfo.cs b/source/MongoDB/AssemblyInfo.cs similarity index 58% rename from MongoDBDriver/AssemblyInfo.cs rename to source/MongoDB/AssemblyInfo.cs index fc12c279..bf7eae8e 100644 --- a/MongoDBDriver/AssemblyInfo.cs +++ b/source/MongoDB/AssemblyInfo.cs @@ -3,34 +3,21 @@ using 
System.Runtime.CompilerServices; using System.Security.Permissions; - // Information about this assembly is defined by the following attributes. -// Change them to the values specific to your project. - -[assembly: AssemblyTitle("MongoDBDriver")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("")] -[assembly: AssemblyCopyright("")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// The assembly version has the format "{Major}.{Minor}.{Build}.{Revision}". -// The form "{Major}.{Minor}.*" will automatically update the build and revision, -// and "{Major}.{Minor}.{Build}.*" will update just the revision. - -[assembly: AssemblyVersion("1.0.*")] +// Change them to the values specific to your project. + +[assembly: AssemblyTitle("MongoDB")] +[assembly: AssemblyDescription("MongoDB-CSharp driver for C#")] +[assembly: AssemblyProduct("MongoDB-CSharp")] // The following attributes are used to specify the signing key for the assembly, // if desired. See the Mono documentation for more information about signing. -[assembly: AssemblyDelaySign(false)] -[assembly: AssemblyKeyFile("")] - [assembly: System.Runtime.InteropServices.ComVisible(false)] [assembly: CLSCompliantAttribute(true)] [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Bson")] [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Bson")] -[assembly: SecurityPermission(SecurityAction.RequestMinimum, Execution = true)] +[assembly: SecurityPermission(SecurityAction.RequestMinimum, Execution = true)] + +[assembly: InternalsVisibleTo("MongoDB.Tests, PublicKey=0024000004800000940000000602000000240000525341310004000001000100ed9e936c4563336be2e14ca802ea727ff49cad3bb1c0b287beed2a9b5eb823c4c44becc80be4bb11dcd7e49d5d6171f68b488853dcbdeb3152ea3db95ba13a70855a715ee21ac76b67f50bcbc93f2e29e409530a00b98fa79b06ac008dd1f4f3582ba6746af3d218b43b70a63254b094be1a2d493590837273f357fc56b2a7a0")] diff --git a/source/MongoDB/Attributes/MongoAliasAttribute.cs b/source/MongoDB/Attributes/MongoAliasAttribute.cs new file mode 100644 index 00000000..a9438a19 --- /dev/null +++ b/source/MongoDB/Attributes/MongoAliasAttribute.cs @@ -0,0 +1,30 @@ +using System; + +namespace MongoDB.Attributes +{ + /// + /// + /// + [AttributeUsage(AttributeTargets.Property)] + public sealed class MongoAliasAttribute : Attribute + { + /// + /// Gets or sets the name. + /// + /// The name. + public string Name { get; private set; } + + /// + /// Initializes a new instance of the class. + /// + /// The name. + public MongoAliasAttribute(string name){ + if(name == null) + throw new ArgumentNullException("name"); + if (name == "_id") + throw new ArgumentException("_id is a reserved alias."); + + Name = name; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Attributes/MongoDefaultAttribute.cs b/source/MongoDB/Attributes/MongoDefaultAttribute.cs new file mode 100644 index 00000000..18f130e6 --- /dev/null +++ b/source/MongoDB/Attributes/MongoDefaultAttribute.cs @@ -0,0 +1,43 @@ +using System; + +namespace MongoDB.Attributes +{ + /// + /// + /// + [AttributeUsage(AttributeTargets.Property)] + public sealed class MongoDefaultAttribute : Attribute + { + + /// + /// Initializes a new instance of the class. + /// + /// The value. 
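// Example (not part of the diff): a sketch of how the Op query operators exercised by the
// TestOp fixture above might be combined into a selector document. Only the Op factory
// methods and the & conjunction shown in those tests are used; pairing an Op with a field
// name via Document.Add is an assumption based on the Document usage elsewhere in this change.
var selector = new Document()
    .Add("age", Op.GreaterThanOrEqual(18) & Op.LessThan(65))   // 18 <= age < 65
    .Add("votes", Op.Mod(5, 0))                                 // votes % 5 == 0
    .Add("nickname", Op.Exists());                              // field must be present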
+ public MongoDefaultAttribute(object value) + : this(value, true) + { } + + /// + /// Initializes a new instance of the class. + /// + /// The value. + /// if set to true [persist default value]. + public MongoDefaultAttribute(object value, bool persistDefaultValue) + { + Value = value; + PersistDefaultValue = persistDefaultValue; + } + + /// + /// Gets or sets a value indicating whether the default value should be persisted. + /// + /// true if [persist default value]; otherwise, false. + public bool PersistDefaultValue { get; private set; } + + /// + /// Gets or sets the value. + /// + /// The value. + public object Value { get; private set; } + } +} \ No newline at end of file diff --git a/source/MongoDB/Attributes/MongoIdAttribute.cs b/source/MongoDB/Attributes/MongoIdAttribute.cs new file mode 100644 index 00000000..15e92eb3 --- /dev/null +++ b/source/MongoDB/Attributes/MongoIdAttribute.cs @@ -0,0 +1,17 @@ +using System; + +namespace MongoDB.Attributes +{ + /// + /// + /// + [AttributeUsage(AttributeTargets.Property)] + public sealed class MongoIdAttribute : Attribute + { + /// + /// Initializes a new instance of the class. + /// + public MongoIdAttribute() + { } + } +} \ No newline at end of file diff --git a/source/MongoDB/Attributes/MongoIgnoreAttribute.cs b/source/MongoDB/Attributes/MongoIgnoreAttribute.cs new file mode 100644 index 00000000..34281843 --- /dev/null +++ b/source/MongoDB/Attributes/MongoIgnoreAttribute.cs @@ -0,0 +1,12 @@ +using System; + +namespace MongoDB.Attributes +{ + /// + /// + /// + [AttributeUsage(AttributeTargets.Property)] + public sealed class MongoIgnoreAttribute : Attribute + { + } +} \ No newline at end of file diff --git a/source/MongoDB/Binary.cs b/source/MongoDB/Binary.cs new file mode 100644 index 00000000..ec931eb2 --- /dev/null +++ b/source/MongoDB/Binary.cs @@ -0,0 +1,248 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Xml; +using System.Xml.Schema; +using System.Xml.Serialization; + +namespace MongoDB +{ + /// + /// + [Serializable] + public sealed class Binary : IEquatable, ICloneable, IEnumerable, IXmlSerializable + { + /// + /// Initializes a new instance of the class. + /// + public Binary() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The value. + public Binary(byte[] bytes) + { + Bytes = bytes; + Subtype = BinarySubtype.General; + } + + /// + /// Initializes a new instance of the class. + /// + /// The bytes. + /// The subtype. + public Binary(byte[] bytes, BinarySubtype subtype) + { + Bytes = bytes; + Subtype = subtype; + } + + /// + /// Gets or sets the bytes. + /// + /// The bytes. + public byte[] Bytes { get; set; } + + /// + /// Gets or sets the subtype. + /// + /// The subtype. + public BinarySubtype Subtype { get; set; } + + /// + /// Creates a new object that is a copy of the current instance. + /// + /// + /// A new object that is a copy of this instance. + /// + public object Clone() + { + return new Binary(Bytes) {Subtype = Subtype}; + } + + /// + /// Returns an enumerator that iterates through a collection. + /// + /// + /// An object that can be used to iterate through the collection. + /// + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + /// + /// Returns an enumerator that iterates through the collection. + /// + /// + /// A that can be used to iterate through the collection. 
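// Example (not part of the diff): a hypothetical class showing how the mapping attributes
// introduced above (MongoIdAttribute, MongoAliasAttribute, MongoDefaultAttribute,
// MongoIgnoreAttribute from MongoDB.Attributes) might be applied. The Person class and its
// properties are illustrative only; the attribute names and constructors come from this change.
public class Person
{
    [MongoId]
    public Oid Id { get; set; }

    [MongoAlias("fn")]          // persisted under the field name "fn" instead of "FirstName"
    public string FirstName { get; set; }

    [MongoDefault(0, false)]    // a value of 0 is treated as the default and not persisted
    public int LoginCount { get; set; }

    [MongoIgnore]               // never mapped into the document
    public string CachedDisplayName { get; set; }
}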
+ /// + public IEnumerator GetEnumerator() + { + if(Bytes == null) + yield break; + + foreach(var b in Bytes) + yield return b; + } + + /// + /// Performs an implicit conversion from to . + /// + /// The binary. + /// The result of the conversion. + public static implicit operator byte[](Binary binary) + { + if(binary == null) + throw new ArgumentNullException("binary"); + + return binary.Bytes; + } + + /// + /// Performs an implicit conversion from to . + /// + /// The bytes. + /// The result of the conversion. + public static implicit operator Binary(byte[] bytes) + { + if(bytes == null) + throw new ArgumentNullException("bytes"); + + return new Binary(bytes); + } + + /// + /// Implements the operator ==. + /// + /// The left. + /// The right. + /// The result of the operator. + public static bool operator ==(Binary left, Binary right) + { + return Equals(left, right); + } + + /// + /// Implements the operator !=. + /// + /// The left. + /// The right. + /// The result of the operator. + public static bool operator !=(Binary left, Binary right) + { + return !Equals(left, right); + } + + /// + /// Indicates whether the current object is equal to another object of the same type. + /// + /// An object to compare with this object. + /// + /// true if the current object is equal to the parameter; otherwise, false. + /// + public bool Equals(Binary other) + { + if(ReferenceEquals(null, other)) + return false; + if(ReferenceEquals(this, other)) + return true; + if(!Equals(other.Subtype, Subtype)) + return false; + if(Bytes != null && other.Bytes != null) + return Bytes.SequenceEqual(other.Bytes); + return Equals(Bytes, other.Bytes); + } + + /// + /// Determines whether the specified is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + /// + /// The parameter is null. + /// + public override bool Equals(object obj) + { + if(ReferenceEquals(null, obj)) + return false; + if(ReferenceEquals(this, obj)) + return true; + return obj.GetType() == typeof(Binary) && Equals((Binary)obj); + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode() + { + unchecked + { + return ((Bytes != null ? Bytes.GetHashCode() : 0)*397) ^ Subtype.GetHashCode(); + } + } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString() + { + return String.Format(@"{{ ""$binary"": ""{0}"", ""$type"" : {1} }}", + Convert.ToBase64String(Bytes??new byte[0]), + (int)Subtype); + } + + /// + /// This method is reserved and should not be used. When implementing the IXmlSerializable interface, you should return null (Nothing in Visual Basic) from this method, and instead, if specifying a custom schema is required, apply the to the class. + /// + /// + /// An that describes the XML representation of the object that is produced by the method and consumed by the method. + /// + XmlSchema IXmlSerializable.GetSchema() + { + return null; + } + + /// + /// Generates an object from its XML representation. + /// + /// The stream from which the object is deserialized. 
+ void IXmlSerializable.ReadXml(XmlReader reader) + { + reader.MoveToAttribute("subtype"); + Subtype = (BinarySubtype)Enum.Parse(typeof(BinarySubtype), reader.Value); + + reader.MoveToElement(); + + if(reader.IsEmptyElement) + return; + + var content = reader.ReadElementContentAsString(); + if(content != null) + Bytes = Convert.FromBase64String(content); + } + + /// + /// Converts an object into its XML representation. + /// + /// The stream to which the object is serialized. + void IXmlSerializable.WriteXml(XmlWriter writer) + { + writer.WriteAttributeString("subtype",Subtype.ToString()); + if(Bytes!=null) + writer.WriteBase64(Bytes,0,Bytes.Length); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/BinarySubtype.cs b/source/MongoDB/BinarySubtype.cs new file mode 100644 index 00000000..e7665971 --- /dev/null +++ b/source/MongoDB/BinarySubtype.cs @@ -0,0 +1,21 @@ +namespace MongoDB +{ + /// + /// + public enum BinarySubtype : byte + { + /// + /// + Unknown = 0, + /// + /// + General = 2, + // Uuid = 3 is now replaced by Guid + /// + /// + Md5 = 5, + /// + /// + UserDefined = 80 + } +} \ No newline at end of file diff --git a/source/MongoDB/Bson/BsonDocumentBuilder.cs b/source/MongoDB/Bson/BsonDocumentBuilder.cs new file mode 100644 index 00000000..27bd61fc --- /dev/null +++ b/source/MongoDB/Bson/BsonDocumentBuilder.cs @@ -0,0 +1,120 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; + +namespace MongoDB.Bson +{ + /// + /// + public class BsonDocumentBuilder : IBsonObjectBuilder + { + /// + /// Begins the object. + /// + /// + public object BeginObject() + { + return new Document(); + } + + /// + /// Ends the object. + /// + /// The instance. + /// + public object EndObject(object instance) + { + var document = (Document)instance; + + if(DBRef.IsDocumentDBRef(document)) + return DBRef.FromDocument(document); + + return document; + } + + /// + /// Begins the array. + /// + /// + public object BeginArray() + { + return BeginObject(); + } + + /// + /// Ends the array. + /// + /// The instance. + /// + public object EndArray(object instance) + { + var document = (Document)EndObject(instance); + return ConvertToArray(document); + } + + /// + /// Begins the property. + /// + /// The instance. + /// The name. + public void BeginProperty(object instance, string name) + { + } + + /// + /// Ends the property. + /// + /// The instance. + /// The name. + /// The value. + public void EndProperty(object instance, string name, object value) + { + var document = (Document)instance; + document.Add(name, value); + } + + /// + /// Gets the type for IEnumerable. + /// + /// The doc. + /// + private Type GetTypeForIEnumerable(IDictionary doc) + { + if(doc.Keys.Count < 1) + return typeof(Object); + + Type comp = null; + + foreach(var test in doc.Keys.Select(key => doc[key]) + .Select(obj => obj.GetType())) + { + if(comp == null) + comp = test; + else if(comp != test) + return typeof(Object); + } + + return comp; + } + + /// + /// Converts to array. + /// + /// The doc. 
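// Example (not part of the diff): constructing a Binary value and using the implicit
// conversions declared in Binary.cs above. The expected ToString output follows the
// format string in that class (and matches the TestSerializeDocWithBinary unit test).
var bytes = new byte[] { 0, 1, 2, 3, 4 };
var binary = new Binary(bytes, BinarySubtype.General);

byte[] raw = binary;        // implicit Binary -> byte[]
Binary copy = raw;          // implicit byte[] -> Binary, subtype defaults to General

Console.WriteLine(binary);  // { "$binary": "AAECAwQ=", "$type" : 2 }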
+ /// + private IEnumerable ConvertToArray(Document doc) + { + var genericListType = typeof(List<>); + var arrayType = GetTypeForIEnumerable(doc); + var listType = genericListType.MakeGenericType(arrayType); + + var list = (IList)Activator.CreateInstance(listType); + + foreach(var key in doc.Keys) + list.Add(doc[key]); + + return list; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Bson/BsonDocumentDescriptor.cs b/source/MongoDB/Bson/BsonDocumentDescriptor.cs new file mode 100644 index 00000000..7facbe73 --- /dev/null +++ b/source/MongoDB/Bson/BsonDocumentDescriptor.cs @@ -0,0 +1,105 @@ +using System.Collections; +using System.Collections.Generic; +using System.Linq; + +namespace MongoDB.Bson +{ + /// + /// + /// + public class BsonDocumentDescriptor : IBsonObjectDescriptor + { + /// + /// Begins the object. + /// + /// The instance. + /// + public object BeginObject(object instance){ + return instance; + } + + /// + /// Begins the array. + /// + /// The instance. + /// + public object BeginArray(object instance){ + var document = new Document(); + + var i = 0; + foreach(var item in (IEnumerable)instance) + document.Add((i++).ToString(), item); + + return document; + } + + /// + /// Gets the property names. + /// + /// The instance. + /// + public IEnumerable GetProperties(object instance) + { + var document = (Document)instance; + return document.Keys.Select(key => new BsonProperty(key)); + } + + /// + /// Begins the property. + /// + /// The instance. + /// The property. + /// + public void BeginProperty(object instance, BsonProperty property){ + var document = (Document)instance; + property.Value = document[property.Name]; + } + + /// + /// Ends the property. + /// + /// The instance. + /// The property. + public void EndProperty(object instance, BsonProperty property){ + } + + /// + /// Ends the array. + /// + /// The instance. + public void EndArray(object instance){ + } + + /// + /// Ends the object. + /// + /// The obj. + public void EndObject(object obj){ + } + + /// + /// Determines whether the specified obj is array. + /// + /// The obj. + /// + /// true if the specified obj is array; otherwise, false. + /// + public bool IsArray(object obj){ + if(obj is Document) + return false; + + return obj is IEnumerable; + } + + /// + /// Determines whether the specified obj is object. + /// + /// The obj. + /// + /// true if the specified obj is object; otherwise, false. + /// + public bool IsObject(object obj){ + return obj is Document; + } + } +} \ No newline at end of file diff --git a/MongoDBDriver/Bson/BsonInfo.cs b/source/MongoDB/Bson/BsonInfo.cs similarity index 87% rename from MongoDBDriver/Bson/BsonInfo.cs rename to source/MongoDB/Bson/BsonInfo.cs index 9dc1c081..acc12071 100644 --- a/MongoDBDriver/Bson/BsonInfo.cs +++ b/source/MongoDB/Bson/BsonInfo.cs @@ -1,7 +1,10 @@ using System; -namespace MongoDB.Driver.Bson +namespace MongoDB.Bson { + /// + /// + /// public static class BsonInfo { /// diff --git a/source/MongoDB/Bson/BsonProperty.cs b/source/MongoDB/Bson/BsonProperty.cs new file mode 100644 index 00000000..ed8273a5 --- /dev/null +++ b/source/MongoDB/Bson/BsonProperty.cs @@ -0,0 +1,28 @@ +namespace MongoDB.Bson +{ + /// + /// + /// + public class BsonProperty + { + /// + /// Initializes a new instance of the class. + /// + /// The name. + public BsonProperty(string name){ + Name = name; + } + + /// + /// Gets or sets the name. + /// + /// The name. + public string Name { get; set; } + + /// + /// Gets or sets the value. + /// + /// The value. 
+ public object Value { get; set; } + } +} \ No newline at end of file diff --git a/source/MongoDB/Bson/BsonReader.cs b/source/MongoDB/Bson/BsonReader.cs new file mode 100644 index 00000000..9c312a64 --- /dev/null +++ b/source/MongoDB/Bson/BsonReader.cs @@ -0,0 +1,412 @@ +using System; +using System.IO; +using System.Text; + +namespace MongoDB.Bson +{ + /// + /// Reads binary streams containing BSON data and converts them to native types. + /// + public class BsonReader + { + private const int MaxCharBytesSize = 128; + private readonly IBsonObjectBuilder _builder; + private readonly BinaryReader _reader; + private readonly byte[] _seqRange1 = new byte[]{0, 127}; //Range of 1-byte sequence + private readonly byte[] _seqRange2 = new byte[]{194, 223}; //Range of 2-byte sequence + private readonly byte[] _seqRange3 = new byte[]{224, 239}; //Range of 3-byte sequence + private readonly byte[] _seqRange4 = new byte[]{240, 244}; //Range of 4-byte sequence + private readonly Stream _stream; + private readonly bool _readLocalTime; + + private byte[] _byteBuffer; + private char[] _charBuffer; + + /// + /// Initializes a new instance of the class. + /// + /// The stream. + /// The settings. + public BsonReader(Stream stream, BsonReaderSettings settings) + { + if(settings == null) + throw new ArgumentNullException("settings"); + + _builder = settings.Builder; + _readLocalTime = settings.ReadLocalTime; + Position = 0; + _stream = stream; + _reader = new BinaryReader(_stream); + } + + /// + /// Initializes a new instance of the class. + /// + /// The stream. + /// The builder. + public BsonReader(Stream stream, IBsonObjectBuilder builder){ + _builder = builder; + Position = 0; + _stream = stream; + _reader = new BinaryReader(_stream); + } + + /// + /// Gets or sets the position. + /// + /// The position. + public int Position { get; private set; } + + /// + /// Reads this instance. + /// + /// + public Document Read(){ + Position = 0; + var doc = (Document)ReadObject(); + return doc; + } + + /// + /// Reads the object. + /// + /// + public object ReadObject(){ + var instance = _builder.BeginObject(); + ReadElements(instance); + return _builder.EndObject(instance); + } + + /// + /// Reads the array. + /// + /// + public object ReadArray(){ + var instance = _builder.BeginArray(); + ReadElements(instance); + return _builder.EndArray(instance); + } + + /// + /// Reads the elements. + /// + /// The instance. + private void ReadElements(object instance){ + var startPosition = Position; + var size = _reader.ReadInt32(); + Position += 4; + while((Position - startPosition) + 1 < size) + ReadElement(instance); + Position++; + if(_reader.ReadByte() != 0) + throw new InvalidDataException("Document not null terminated"); + if(size != Position - startPosition) + throw new InvalidDataException(string.Format("Should have read {0} bytes from stream but only read {1}", + size, + (Position - startPosition))); + } + + /// + /// Reads the element. + /// + /// The instance. + private void ReadElement(object instance){ + Position++; + var typeNumber = (sbyte)_reader.ReadByte(); + var key = ReadString(); + _builder.BeginProperty(instance, key); + var element = ReadElementType(typeNumber); + _builder.EndProperty(instance, key, element); + } + + /// + /// Reads the type of the element. + /// + /// The type number. 
+ /// + public Object ReadElementType(int typeNumber){ + switch((BsonType)typeNumber){ + case BsonType.Null: + case BsonType.Undefined: + return null; + case BsonType.MinKey: + return MongoMinKey.Value; + case BsonType.MaxKey: + return MongoMaxKey.Value; + case BsonType.Boolean: + Position++; + return _reader.ReadBoolean(); + case BsonType.Integer: + Position += 4; + return _reader.ReadInt32(); + case BsonType.Long: + Position += 8; + return _reader.ReadInt64(); + case BsonType.Date: + return ReadDateTime(); + case BsonType.Oid: + Position += 12; + return new Oid(_reader.ReadBytes(12)); + case BsonType.Number: + Position += 8; + return _reader.ReadDouble(); + case BsonType.String: + return ReadLengthString(); + case BsonType.Symbol: + return new MongoSymbol(ReadLengthString()); + case BsonType.Obj: + return ReadObject(); + case BsonType.Array: + return ReadArray(); + case BsonType.Regex: + return ReadRegex(); + case BsonType.Code: + return ReadCode(); + case BsonType.CodeWScope: + return ReadScope(); + case BsonType.Binary: + return ReadBinary(); + default: + throw new ArgumentOutOfRangeException(String.Format("Type Number: {0} not recognized", typeNumber)); + } + } + + /// + /// Reads the date time. + /// + /// + private object ReadDateTime(){ + Position += 8; + var milliseconds = _reader.ReadInt64(); + var time = BsonInfo.Epoch.AddMilliseconds(milliseconds); + if(_readLocalTime) + time = time.ToLocalTime(); + return time; + } + + /// + /// Reads the string. + /// + /// + public string ReadString(){ + EnsureBuffers(); + + var builder = new StringBuilder(); + var offset = 0; + do{ + var count = offset; + byte readByte = 0; + + while(count < MaxCharBytesSize && (readByte = _reader.ReadByte()) > 0) + _byteBuffer[count++] = readByte; + + var byteCount = count - offset; + Position += byteCount; + + if(count == 0) + break; //first byte read was the terminator. + + var lastFullCharStop = GetLastFullCharStop(count - 1); + + var charCount = Encoding.UTF8.GetChars(_byteBuffer, 0, lastFullCharStop + 1, _charBuffer, 0); + builder.Append(_charBuffer, 0, charCount); + + if(lastFullCharStop < byteCount - 1){ + offset = byteCount - lastFullCharStop - 1; + //Copy end bytes to begining + Array.Copy(_byteBuffer, lastFullCharStop + 1, _byteBuffer, 0, offset); + } + else + offset = 0; + + if(readByte == 0) + break; + } + while(true); + Position++; + return builder.ToString(); + } + + /// + /// Reads the length string. + /// + /// + public string ReadLengthString(){ + var length = _reader.ReadInt32(); + var str = GetString(length - 1); + _reader.ReadByte(); + + Position += (4 + 1); + return str; + } + + /// + /// Gets the string. + /// + /// The length. + /// + private string GetString(int length){ + if(length == 0) + return string.Empty; + + EnsureBuffers(); + + var builder = new StringBuilder(length); + + var totalBytesRead = 0; + var offset = 0; + do{ + var count = ((length - totalBytesRead) > MaxCharBytesSize - offset) + ? 
(MaxCharBytesSize - offset) + : + (length - totalBytesRead); + + var byteCount = _reader.BaseStream.Read(_byteBuffer, offset, count); + totalBytesRead += byteCount; + byteCount += offset; + + var lastFullCharStop = GetLastFullCharStop(byteCount - 1); + + if(byteCount == 0) + throw new EndOfStreamException("Unable to read beyond the end of the stream."); + + var charCount = Encoding.UTF8.GetChars(_byteBuffer, 0, lastFullCharStop + 1, _charBuffer, 0); + builder.Append(_charBuffer, 0, charCount); + + if(lastFullCharStop < byteCount - 1){ + offset = byteCount - lastFullCharStop - 1; + //Copy end bytes to begining + Array.Copy(_byteBuffer, lastFullCharStop + 1, _byteBuffer, 0, offset); + } + else + offset = 0; + } + while(totalBytesRead < length); + + Position += totalBytesRead; + return builder.ToString(); + } + + /// + /// Reads the scope. + /// + /// + private object ReadScope(){ + var startpos = Position; + var size = _reader.ReadInt32(); + Position += 4; + + var val = ReadLengthString(); + var scope = (Document)ReadObject(); + if(size != Position - startpos) + throw new InvalidDataException(string.Format("Should have read {0} bytes from stream but read {1} in CodeWScope", + size, + Position - startpos)); + + return new CodeWScope(val, scope); + } + + /// + /// Reads the code. + /// + /// + private object ReadCode(){ + return new Code{Value = ReadLengthString()}; + } + + /// + /// Reads the regex. + /// + /// + private object ReadRegex(){ + return new MongoRegex{ + Expression = ReadString(), + RawOptions = ReadString() + }; + } + + /// + /// Reads the binary. + /// + /// + private object ReadBinary(){ + var size = _reader.ReadInt32(); + Position += 4; + var subtype = _reader.ReadByte(); + Position ++; + if(subtype == (byte)BinarySubtype.General){ + size = _reader.ReadInt32(); + Position += 4; + } + var bytes = _reader.ReadBytes(size); + Position += size; + + // From http://en.wikipedia.org/wiki/Universally_Unique_Identifier + // The most widespread use of this standard is in Microsoft's Globally Unique Identifiers (GUIDs). + if(subtype == 3 && 16 == size) + return new Guid(bytes); + + return new Binary{ + Bytes = bytes, + Subtype = (BinarySubtype)subtype + }; + } + + /// + /// Gets the last full char stop. + /// + /// The start. + /// + private int GetLastFullCharStop(int start){ + var lookbackPos = start; + var bis = 0; + + while(lookbackPos >= 0){ + bis = BytesInSequence(_byteBuffer[lookbackPos]); + if(bis == 0){ + lookbackPos--; + continue; + } + + if(bis == 1) + break; + + lookbackPos--; + break; + } + + return bis == start - lookbackPos ? start : lookbackPos; + } + + /// + /// Byteses the in sequence. + /// + /// The b. + /// + private int BytesInSequence(byte b){ + if(b <= _seqRange1[1]) + return 1; + if(b >= _seqRange2[0] && b <= _seqRange2[1]) + return 2; + if(b >= _seqRange3[0] && b <= _seqRange3[1]) + return 3; + if(b >= _seqRange4[0] && b <= _seqRange4[1]) + return 4; + return 0; + } + + /// + /// Ensures the buffers. 
+ /// + private void EnsureBuffers(){ + if(_byteBuffer == null) + _byteBuffer = new byte[MaxCharBytesSize]; + if(_charBuffer != null) + return; + + var charBufferSize = Encoding.UTF8.GetMaxCharCount(MaxCharBytesSize); + + _charBuffer = new char[charBufferSize]; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Bson/BsonReaderSettings.cs b/source/MongoDB/Bson/BsonReaderSettings.cs new file mode 100644 index 00000000..69d2c269 --- /dev/null +++ b/source/MongoDB/Bson/BsonReaderSettings.cs @@ -0,0 +1,44 @@ +using System; + +namespace MongoDB.Bson +{ + /// + /// + /// + public class BsonReaderSettings + { + /// + /// Initializes a new instance of the class. + /// + public BsonReaderSettings() + :this(new BsonDocumentBuilder()){ + } + + /// + /// Initializes a new instance of the class. + /// + /// The builder. + public BsonReaderSettings(IBsonObjectBuilder builder){ + if(builder == null) + throw new ArgumentNullException("builder"); + + Builder = builder; + } + + /// + /// Gets or sets the builder. + /// + /// The builder. + public IBsonObjectBuilder Builder { get; private set; } + + /// + /// Reads DataTime from server as local time. + /// + /// true if [read local time]; otherwise, false. + /// + /// MongoDB stores all time values in UTC timezone. If true the + /// time is converted from UTC to local timezone after is was read. + /// + public bool ReadLocalTime { get; set; } + } +} \ No newline at end of file diff --git a/MongoDBDriver/Bson/BsonDataType.cs b/source/MongoDB/Bson/BsonType.cs similarity index 91% rename from MongoDBDriver/Bson/BsonDataType.cs rename to source/MongoDB/Bson/BsonType.cs index 5789a216..404a1a28 100644 --- a/MongoDBDriver/Bson/BsonDataType.cs +++ b/source/MongoDB/Bson/BsonType.cs @@ -1,7 +1,13 @@ -namespace MongoDB.Driver.Bson -{ - public enum BsonDataType:sbyte - { +namespace MongoDB.Bson +{ + /// + /// + /// + public enum BsonType + { + /// + /// + /// Number = 1, /// /// int32 @@ -67,7 +73,12 @@ public enum BsonDataType:sbyte /// be supported in BSON encoders/decoders, but has been deprecated in /// favor of data_code_w_scope /// - Code = 13, + Code = 13, + + /// + /// String + /// + Symbol = 14, /// /// int32 /// int32 cstring bson_object The first int32 is the total # of diff --git a/source/MongoDB/Bson/BsonWriter.cs b/source/MongoDB/Bson/BsonWriter.cs new file mode 100644 index 00000000..8996cd2a --- /dev/null +++ b/source/MongoDB/Bson/BsonWriter.cs @@ -0,0 +1,615 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.IO; +using System.Text; +using System.Text.RegularExpressions; + +namespace MongoDB.Bson +{ + /// + /// Class that knows how to format a native object into bson bits. + /// + public class BsonWriter + { + private const int BufferLength = 256; + private readonly byte[] _buffer; + private readonly IBsonObjectDescriptor _descriptor; + private readonly int _maxChars; + private readonly Stream _stream; + private readonly BinaryWriter _writer; + + /// + /// Initializes a new instance of the class. + /// + /// The stream. + /// The settings. + public BsonWriter(Stream stream, BsonWriterSettings settings) + { + if(settings == null) + throw new ArgumentNullException("settings"); + _stream = stream; + _descriptor = settings.Descriptor; + _writer = new BinaryWriter(_stream); + _buffer = new byte[BufferLength]; + _maxChars = BufferLength/Encoding.UTF8.GetMaxByteCount(1); + } + + /// + /// Initializes a new instance of the class. + /// + /// The stream. + /// The descriptor. 
+ public BsonWriter(Stream stream, IBsonObjectDescriptor descriptor) + { + _stream = stream; + _descriptor = descriptor; + _writer = new BinaryWriter(_stream); + _buffer = new byte[BufferLength]; + _maxChars = BufferLength/Encoding.UTF8.GetMaxByteCount(1); + } + + /// + /// Writes the value. + /// + /// Type of the data. + /// The obj. + public void WriteValue(BsonType type, Object obj) + { + switch(type) + { + case BsonType.MinKey: + case BsonType.MaxKey: + case BsonType.Null: + return; + case BsonType.Boolean: + _writer.Write((bool)obj); + return; + case BsonType.Integer: + _writer.Write((int)obj); + return; + case BsonType.Long: + if(obj is TimeSpan) + _writer.Write(((TimeSpan)obj).Ticks); + else + _writer.Write((long)obj); + return; + case BsonType.Date: + Write((DateTime)obj); + return; + case BsonType.Oid: + Write((Oid)obj); + return; + case BsonType.Number: + _writer.Write(Convert.ToDouble(obj)); + return; + case BsonType.String: + if(obj is string) + Write((string)obj); + else + Write(obj.ToString()); + return; + case BsonType.Obj: + if(obj is DBRef) + Write((DBRef)obj); + else + WriteObject(obj); + return; + case BsonType.Array: + WriteArray((IEnumerable)obj); + return; + case BsonType.Regex: + if(obj is Regex) + Write(new MongoRegex((Regex)obj)); + else + Write((MongoRegex)obj); + return; + case BsonType.Code: + Write((Code)obj); + return; + case BsonType.Symbol: + WriteValue(BsonType.String, ((MongoSymbol)obj).Value); + return; + case BsonType.CodeWScope: + Write((CodeWScope)obj); + return; + case BsonType.Binary: + { + if(obj is Guid) + Write((Guid)obj); + else if(obj is byte[]) + Write((byte[])obj); + else + Write((Binary)obj); + return; + } + default: + throw new NotImplementedException(String.Format("Writing {0} types not implemented.", obj.GetType().Name)); + } + } + + /// + /// Writes the specified id. + /// + /// The id. + private void Write(Oid id) + { + _writer.Write(id.ToByteArray()); + } + + /// + /// Writes the specified binary. + /// + /// The binary. + private void Write(Binary binary) + { + if(binary.Subtype == BinarySubtype.General) + { + _writer.Write(binary.Bytes.Length + 4); + _writer.Write((byte)binary.Subtype); + _writer.Write(binary.Bytes.Length); + } + else + { + _writer.Write(binary.Bytes.Length); + _writer.Write((byte)binary.Subtype); + } + _writer.Write(binary.Bytes); + } + + /// + /// Writes the specified GUID. + /// + /// The GUID. + private void Write(Guid guid) + { + _writer.Write(16); + _writer.Write((byte)3); + _writer.Write(guid.ToByteArray()); + } + + /// + /// Writes the specified bytes. + /// + /// The bytes. + private void Write(byte[] bytes) + { + Write(new Binary(bytes)); + } + + /// + /// Writes the specified code scope. + /// + /// The code scope. + private void Write(CodeWScope codeScope) + { + _writer.Write(CalculateSize(codeScope)); + WriteValue(BsonType.String, codeScope.Value); + WriteValue(BsonType.Obj, codeScope.Scope); + } + + /// + /// Writes the specified code. + /// + /// The code. + private void Write(Code code) + { + WriteValue(BsonType.String, code.Value); + } + + /// + /// Writes the specified regex. + /// + /// The regex. + private void Write(MongoRegex regex) + { + Write(regex.Expression, false); + Write(regex.RawOptions, false); + } + + /// + /// Writes the specified reference. + /// + /// The reference. + public void Write(DBRef reference) + { + WriteObject((Document)reference); + } + + /// + /// Writes the specified data time. + /// + /// The data time. 
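// Example (not part of the diff): BSON dates are written as milliseconds since the Unix
// epoch in UTC (see Write(DateTime) below and BsonInfo.Epoch). On the reading side,
// BsonReaderSettings.ReadLocalTime (introduced earlier in this change) controls whether
// values are converted back to local time. A minimal sketch; inputStream is assumed to be
// any readable Stream positioned at a BSON document.
var settings = new BsonReaderSettings { ReadLocalTime = true };
var reader = new BsonReader(inputStream, settings);
Document doc = reader.Read();   // DateTime fields are now converted to local time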
+ private void Write(DateTime dateTime) + { + var diff = dateTime.ToUniversalTime() - BsonInfo.Epoch; + var time = Math.Floor(diff.TotalMilliseconds); + _writer.Write((long)time); + } + + /// + /// Writes the object. + /// + /// The obj. + public void WriteObject(object obj) + { + obj = _descriptor.BeginObject(obj); + WriteElements(obj); + _descriptor.EndObject(obj); + } + + /// + /// Writes the elements. + /// + /// The obj. + private void WriteElements(object obj) + { + var properties = _descriptor.GetProperties(obj); + var size = CalculateSizeObject(obj, properties); + if(size >= BsonInfo.MaxDocumentSize) + throw new ArgumentException("Maximum document size exceeded."); + _writer.Write(size); + foreach(var property in properties) + { + _descriptor.BeginProperty(obj, property); + var bsonType = TranslateToBsonType(property.Value); + _writer.Write((byte)bsonType); + Write(property.Name, false); + WriteValue(bsonType, property.Value); + _descriptor.EndProperty(obj, property); + } + _writer.Write((byte)0); + } + + /// + /// Writes the array. + /// + /// The enumerable. + public void WriteArray(IEnumerable enumerable) + { + var obj = _descriptor.BeginArray(enumerable); + WriteElements(obj); + _descriptor.EndArray(obj); + } + + /// + /// Writes the specified value. + /// + /// The value. + private void Write(string value) + { + Write(value, true); + } + + /// + /// Writes the specified value. + /// + /// The value. + /// if set to true [include length]. + public void Write(string value, bool includeLength) + { + if(includeLength) + _writer.Write(CalculateSize(value, false)); + var byteCount = Encoding.UTF8.GetByteCount(value); + if(byteCount < BufferLength) + { + Encoding.UTF8.GetBytes(value, 0, value.Length, _buffer, 0); + _writer.Write(_buffer, 0, byteCount); + } + else + { + int charCount; + var totalCharsWritten = 0; + + for(var i = value.Length; i > 0; i -= charCount) + { + charCount = (i > _maxChars) ? _maxChars : i; + var count = Encoding.UTF8.GetBytes(value, totalCharsWritten, charCount, _buffer, 0); + _writer.Write(_buffer, 0, count); + totalCharsWritten += charCount; + } + } + _writer.Write((byte)0); + } + + /// + /// Calculates the size. + /// + /// The obj. + /// + public int CalculateSize(Object obj) + { + if(obj == null) + return 0; + + switch(TranslateToBsonType(obj)) + { + case BsonType.MinKey: + case BsonType.MaxKey: + case BsonType.Null: + return 0; + case BsonType.Boolean: + return 1; + case BsonType.Integer: + return 4; + case BsonType.Long: + case BsonType.Date: + return 8; + case BsonType.Oid: + return 12; + case BsonType.Number: + return sizeof(Double); + case BsonType.String: + if(obj is string) + return CalculateSize((string)obj); + return CalculateSize(obj.ToString()); + case BsonType.Obj: + return obj.GetType() == typeof(DBRef) ? 
CalculateSize((DBRef)obj) : CalculateSizeObject(obj); + case BsonType.Array: + return CalculateSize((IEnumerable)obj); + case BsonType.Regex: + if(obj is Regex) + return CalculateSize(new MongoRegex((Regex)obj)); + return CalculateSize((MongoRegex)obj); + case BsonType.Code: + return CalculateSize((Code)obj); + case BsonType.CodeWScope: + return CalculateSize((CodeWScope)obj); + case BsonType.Binary: + { + if(obj is Guid) + return CalculateSize((Guid)obj); + if(obj is byte[]) + return CalculateSize((byte[])obj); + + return CalculateSize((Binary)obj); + } + case BsonType.Symbol: + return CalculateSize(((MongoSymbol)obj).Value, true); + } + + throw new NotImplementedException(String.Format("Calculating size of {0} is not implemented.", obj.GetType().Name)); + } + + /// + /// Calculates the size. + /// + /// The code. + /// + private int CalculateSize(Code code) + { + return CalculateSize(code.Value, true); + } + + /// + /// Calculates the size. + /// + /// The regex. + /// + public int CalculateSize(MongoRegex regex) + { + var size = CalculateSize(regex.Expression, false); + size += CalculateSize(regex.RawOptions, false); + return size; + } + + /// + /// Calculates the size. + /// + /// The code scope. + /// + public int CalculateSize(CodeWScope codeScope) + { + var size = 4; + size += CalculateSize(codeScope.Value, true); + size += CalculateSizeObject(codeScope.Scope); + return size; + } + + /// + /// Calculates the size. + /// + /// The binary. + /// + public int CalculateSize(Binary binary) + { + var size = 4; //size int + size += 1; //subtype + if(binary.Subtype == BinarySubtype.General) + size += 4; //embedded size int + size += binary.Bytes.Length; + return size; + } + + /// + /// Calculates the size. + /// + /// The bytes. + /// + public int CalculateSize(byte[] bytes) + { + return CalculateSize(new Binary(bytes)); + } + + /// + /// Calculates the size. + /// + /// The GUID. + /// + public int CalculateSize(Guid guid) + { + return 21; + } + + /// + /// Calculates the size. + /// + /// The reference. + /// + public int CalculateSize(DBRef reference) + { + return CalculateSizeObject((Document)reference); + } + + /// + /// Calculates the size object. + /// + /// The obj. + /// + public int CalculateSizeObject(object obj) + { + obj = _descriptor.BeginObject(obj); + var properties = _descriptor.GetProperties(obj); + + var size = CalculateSizeObject(obj, properties); + + _descriptor.EndObject(obj); + + return size; + } + + /// + /// Calculates the size object. + /// + /// The obj. + /// The propertys. + /// + private int CalculateSizeObject(object obj, IEnumerable propertys) + { + var size = 4; + foreach(var property in propertys) + { + var elsize = 1; //type + _descriptor.BeginProperty(obj, property); + elsize += CalculateSize(property.Name, false); + elsize += CalculateSize(property.Value); + _descriptor.EndProperty(obj, property); + size += elsize; + } + size += 1; //terminator + return size; + } + + /// + /// Calculates the size. + /// + /// The enumerable. + /// + public int CalculateSize(IEnumerable enumerable) + { + var obj = _descriptor.BeginArray(enumerable); + var properties = _descriptor.GetProperties(obj); + + var size = CalculateSizeObject(obj, properties); + + _descriptor.EndArray(obj); + + return size; + } + + /// + /// Calculates the size. + /// + /// The value. + /// + public int CalculateSize(String value) + { + return CalculateSize(value, true); + } + + /// + /// Calculates the size. + /// + /// The value. + /// if set to true [include length]. 
+ /// + public int CalculateSize(String value, bool includeLength) + { + var size = 1; //terminator + if(includeLength) + size += 4; + if(value != null) + size += Encoding.UTF8.GetByteCount(value); + return size; + } + + /// + /// Flushes this instance. + /// + public void Flush() + { + _writer.Flush(); + } + + /// + /// Translates the type of to bson. + /// + /// The obj. + /// + protected BsonType TranslateToBsonType(object obj) + { + //TODO:Convert to use a dictionary + if(obj == null) + return BsonType.Null; + + var type = obj.GetType(); + + if(obj is Enum) //special case enums + type = Enum.GetUnderlyingType(type); + if(type == typeof(Double)) + return BsonType.Number; + if(type == typeof(Single)) + return BsonType.Number; + if(type == typeof(String)) + return BsonType.String; + if(type == typeof(Uri)) + return BsonType.String; + if(type == typeof(int)) + return BsonType.Integer; + if(type == typeof(long)) + return BsonType.Long; + if(type == typeof(bool)) + return BsonType.Boolean; + if(type == typeof(Oid)) + return BsonType.Oid; + if(type == typeof(DateTime)) + return BsonType.Date; + if(type == typeof(TimeSpan)) + return BsonType.Long; + if(type == typeof(MongoRegex)) + return BsonType.Regex; + if(type == typeof(Regex)) + return BsonType.Regex; + if(type == typeof(DBRef)) + return BsonType.Obj; + if(type == typeof(Code)) + return BsonType.Code; + if(type == typeof(CodeWScope)) + return BsonType.CodeWScope; + if(type == typeof(DBNull)) + return BsonType.Null; + if(type == typeof(Binary)) + return BsonType.Binary; + if(type == typeof(Guid)) + return BsonType.Binary; + if(type == typeof(MongoMinKey)) + return BsonType.MinKey; + if(type == typeof(MongoMaxKey)) + return BsonType.MaxKey; + if(type == typeof(MongoSymbol)) + return BsonType.Symbol; + if(type == typeof(byte[])) + return BsonType.Binary; + if(_descriptor.IsArray(obj)) + return BsonType.Array; + if(_descriptor.IsObject(obj)) + return BsonType.Obj; + + if(type == typeof(Decimal)) + throw new ArgumentOutOfRangeException("Decimal is not supported in the BSON spec. So it is also not supported in MongoDB. " + + "You could convert it to double or store it as Binary instead."); + + throw new ArgumentOutOfRangeException(String.Format("Type: {0} not recognized", type.FullName)); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Bson/BsonWriterSettings.cs b/source/MongoDB/Bson/BsonWriterSettings.cs new file mode 100644 index 00000000..4f5355b0 --- /dev/null +++ b/source/MongoDB/Bson/BsonWriterSettings.cs @@ -0,0 +1,35 @@ +using System; + +namespace MongoDB.Bson +{ + /// + /// + /// + public class BsonWriterSettings + { + /// + /// Initializes a new instance of the class. + /// + public BsonWriterSettings() + : this(new BsonDocumentDescriptor()) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The descriptor. + public BsonWriterSettings(IBsonObjectDescriptor descriptor){ + if(descriptor == null) + throw new ArgumentNullException("descriptor"); + + Descriptor = descriptor; + } + + /// + /// Gets or sets the descriptor. + /// + /// The descriptor. + public IBsonObjectDescriptor Descriptor { get; private set; } + } +} \ No newline at end of file diff --git a/source/MongoDB/Bson/IBsonObjectBuilder.cs b/source/MongoDB/Bson/IBsonObjectBuilder.cs new file mode 100644 index 00000000..d81eb113 --- /dev/null +++ b/source/MongoDB/Bson/IBsonObjectBuilder.cs @@ -0,0 +1,49 @@ +namespace MongoDB.Bson +{ + /// + /// + /// + public interface IBsonObjectBuilder + { + /// + /// Begins the object. 
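// Example (not part of the diff): a minimal round trip through the BsonWriter and BsonReader
// classes introduced above, using a MemoryStream. The constructors, WriteObject, Flush and
// Read calls are the ones declared in this change; resetting the stream position is ordinary
// .NET stream handling, not driver API.
var document = new Document()
    .Add("name", "mongo")
    .Add("count", 42);

using (var stream = new MemoryStream())
{
    var writer = new BsonWriter(stream, new BsonDocumentDescriptor());
    writer.WriteObject(document);
    writer.Flush();

    stream.Position = 0;

    var reader = new BsonReader(stream, new BsonDocumentBuilder());
    Document copy = reader.Read();   // copy contains the "name" and "count" elements
}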
+ /// + /// + object BeginObject(); + + /// + /// Ends the object. + /// + /// The instance. + /// + object EndObject(object instance); + + /// + /// Begins the array. + /// + /// + object BeginArray(); + + /// + /// Ends the array. + /// + /// The instance. + /// + object EndArray(object instance); + + /// + /// Begins the property. + /// + /// The instance. + /// The name. + void BeginProperty(object instance, string name); + + /// + /// Ends the property. + /// + /// The instance. + /// The name. + /// The value. + void EndProperty(object instance, string name, object value); + } +} \ No newline at end of file diff --git a/source/MongoDB/Bson/IBsonObjectDescriptor.cs b/source/MongoDB/Bson/IBsonObjectDescriptor.cs new file mode 100644 index 00000000..5e964d34 --- /dev/null +++ b/source/MongoDB/Bson/IBsonObjectDescriptor.cs @@ -0,0 +1,75 @@ +using System.Collections.Generic; + +namespace MongoDB.Bson +{ + /// + /// + /// + public interface IBsonObjectDescriptor + { + /// + /// Begins the object. + /// + /// The instance. + /// + object BeginObject(object instance); + + /// + /// Begins the array. + /// + /// The instance. + /// + object BeginArray(object instance); + + /// + /// Gets the propertiess. + /// + /// The instance. + /// + IEnumerable GetProperties(object instance); + + /// + /// Begins the property. + /// + /// The instance. + /// The property. + void BeginProperty(object instance, BsonProperty property); + + /// + /// Ends the property. + /// + /// The instance. + /// The property. + void EndProperty(object instance, BsonProperty property); + + /// + /// Ends the array. + /// + /// The instance. + void EndArray(object instance); + + /// + /// Ends the object. + /// + /// The instance. + void EndObject(object instance); + + /// + /// Determines whether the specified instance is array. + /// + /// The instance. + /// + /// true if the specified instance is array; otherwise, false. + /// + bool IsArray(object instance); + + /// + /// Determines whether the specified instance is object. + /// + /// The instance. + /// + /// true if the specified instance is object; otherwise, false. + /// + bool IsObject(object instance); + } +} \ No newline at end of file diff --git a/source/MongoDB/Code.cs b/source/MongoDB/Code.cs new file mode 100644 index 00000000..eeb0c647 --- /dev/null +++ b/source/MongoDB/Code.cs @@ -0,0 +1,110 @@ +using System; +using MongoDB.Util; + +namespace MongoDB +{ + /// + /// + [Serializable] + public sealed class Code : IEquatable + { + /// + /// Initializes a new instance of the class. + /// + public Code() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The value. + public Code(string value) + { + Value = value; + } + + /// + /// Gets or sets the value. + /// + /// The value. + public string Value { get; set; } + + /// + /// Indicates whether the current object is equal to another object of the same type. + /// + /// An object to compare with this object. + /// + /// true if the current object is equal to the parameter; otherwise, false. + /// + public bool Equals(Code other) + { + if(ReferenceEquals(null, other)) + return false; + return ReferenceEquals(this, other) || Equals(other.Value, Value); + } + + /// + /// Determines whether the specified is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + /// + /// The parameter is null. 
+ /// + public override bool Equals(object obj) + { + if(ReferenceEquals(null, obj)) + return false; + if(ReferenceEquals(this, obj)) + return true; + return obj.GetType() == typeof(Code) && Equals((Code)obj); + } + + /// + /// Implements the operator ==. + /// + /// The left. + /// The right. + /// The result of the operator. + public static bool operator ==(Code left, Code right) + { + return Equals(left, right); + } + + /// + /// Implements the operator !=. + /// + /// The left. + /// The right. + /// The result of the operator. + public static bool operator !=(Code left, Code right) + { + return !Equals(left, right); + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode() + { + return (Value != null ? Value.GetHashCode() : 0); + } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString() + { + return string.Format(@"{{ ""$code"": ""{0}"" }}", JsonFormatter.Escape(Value)); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/CodeWScope.cs b/source/MongoDB/CodeWScope.cs new file mode 100644 index 00000000..359282e5 --- /dev/null +++ b/source/MongoDB/CodeWScope.cs @@ -0,0 +1,120 @@ +using System; + +namespace MongoDB +{ + /// + /// + [Serializable] + public sealed class CodeWScope : IEquatable + { + /// + /// Initializes a new instance of the class. + /// + public CodeWScope() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The code. + public CodeWScope(String code) + : this(code, new Document()) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The code. + /// The scope. + public CodeWScope(String code, Document scope) + { + Value = code; + Scope = scope; + } + + /// + /// Gets or sets the value. + /// + /// The value. + public string Value { get; set; } + + /// + /// Gets or sets the scope. + /// + /// The scope. + public Document Scope { get; set; } + + /// + /// Indicates whether the current object is equal to another object of the same type. + /// + /// An object to compare with this object. + /// + /// true if the current object is equal to the parameter; otherwise, false. + /// + public bool Equals(CodeWScope other) + { + if(ReferenceEquals(null, other)) + return false; + if(ReferenceEquals(this, other)) + return true; + return Equals(other.Value, Value) && Equals(other.Scope, Scope); + } + + /// + /// Determines whether the specified is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + /// + /// The parameter is null. + /// + public override bool Equals(object obj) + { + if(ReferenceEquals(null, obj)) + return false; + if(ReferenceEquals(this, obj)) + return true; + return obj.GetType() == typeof(CodeWScope) && Equals((CodeWScope)obj); + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode() + { + unchecked + { + return ((Value != null ? Value.GetHashCode() : 0)*397) ^ (Scope != null ? Scope.GetHashCode() : 0); + } + } + + /// + /// Implements the operator ==. + /// + /// The left. + /// The right. + /// The result of the operator. 
+ public static bool operator ==(CodeWScope left, CodeWScope right) + { + return Equals(left, right); + } + + /// + /// Implements the operator !=. + /// + /// The left. + /// The right. + /// The result of the operator. + public static bool operator !=(CodeWScope left, CodeWScope right) + { + return !Equals(left, right); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/CollectionMetadata.cs b/source/MongoDB/CollectionMetadata.cs new file mode 100644 index 00000000..d75d06ae --- /dev/null +++ b/source/MongoDB/CollectionMetadata.cs @@ -0,0 +1,158 @@ +using System; +using System.Collections.Generic; +using System.Text; +using MongoDB.Configuration; +using MongoDB.Connections; + +namespace MongoDB +{ + /// + /// Lazily loaded meta data on the collection. + /// + public class CollectionMetadata + { + private readonly MongoDatabase _database; + private readonly string _fullName; + private readonly Dictionary _indexes = new Dictionary(); + private readonly string _name; + private bool _gotIndexes; + private Document _options; + + /// + /// Initializes a new instance of the class. + /// + /// The configuration. + /// Name of the database. + /// The name. + /// The connection. + internal CollectionMetadata(MongoConfiguration configuration, string databaseName, string collectionName, Connection connection) + { + //Todo: Add public constrcutors for users to call + _fullName = databaseName + "." + collectionName; + _name = collectionName; + _database = new MongoDatabase(configuration, connection, databaseName); + } + + /// + /// Gets the options. + /// + /// The options. + public Document Options + { + get + { + if(_options != null) + return _options; + var doc = _database["system.namespaces"].FindOne(new Document().Add("name", _fullName)) ?? new Document(); + if(doc.ContainsKey("create")) + doc.Remove("create"); + //Not sure why this is here. The python driver has it. + _options = doc; + return _options; + } + } + + /// + /// Gets the indexes. + /// + /// The indexes. + public Dictionary Indexes + { + get + { + if(_gotIndexes) + return _indexes; + + _indexes.Clear(); + + var docs = _database["system.indexes"].Find(new Document().Add("ns", _fullName)); + foreach(var doc in docs.Documents) + _indexes.Add((string)doc["name"], doc); + + return _indexes; + } + } + + /// + /// Creates the index. + /// + /// The name. + /// The fields and directions. + /// if set to true [unique]. + public void CreateIndex(string name, Document fieldsAndDirections, bool unique) + { + var index = new Document(); + index["name"] = name; + index["ns"] = _fullName; + index["key"] = fieldsAndDirections; + index["unique"] = unique; + _database["system.indexes"].Insert(index); + Refresh(); + } + + /// + /// Creates the index. + /// + /// The fields and directions. + /// if set to true [unique]. + public void CreateIndex(Document fieldsAndDirections, bool unique) + { + var name = generateIndexName(fieldsAndDirections, unique); + CreateIndex(name, fieldsAndDirections, unique); + } + + /// + /// Drops the index. + /// + /// The name. + public void DropIndex(string name) + { + var cmd = new Document(); + cmd.Add("deleteIndexes", _name).Add("index", name); + _database.SendCommand(cmd); + Refresh(); + } + + /// + /// Renames the specified new name. + /// + /// The new name. 
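// Example (not part of the diff): building Code and CodeWScope values as declared above.
// Passing a Code to a $where-style query (see the Op.Where test earlier in this change) is
// the typical use; the field names in the scope document are illustrative only.
var predicate = new Code("return this.a == 3 || this.b == 4;");
var scoped = new CodeWScope("return this.x > limit;", new Document().Add("limit", 10));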
+ public void Rename(string newName) + { + if(string.IsNullOrEmpty(newName)) + throw new ArgumentException("Name must not be null or empty", "newName"); + + var cmd = new Document(); + cmd.Add("renameCollection", _fullName).Add("to", _database.Name + "." + newName); + _database.GetSisterDatabase("admin").SendCommand(cmd); + Refresh(); + } + + /// + /// Refreshes this instance. + /// + public void Refresh() + { + _indexes.Clear(); + _gotIndexes = false; + _options = null; + } + + /// + /// Generates the name of the index. + /// + /// The fields and directions. + /// if set to true [unique]. + /// + protected string generateIndexName(Document fieldsAndDirections, bool unique) + { + var sb = new StringBuilder("_"); + foreach(var key in fieldsAndDirections.Keys) + sb.Append(key).Append("_"); + if(unique) + sb.Append("unique_"); + + return sb.ToString(); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Commands/MapReduceCommand.cs b/source/MongoDB/Commands/MapReduceCommand.cs new file mode 100644 index 00000000..199eb651 --- /dev/null +++ b/source/MongoDB/Commands/MapReduceCommand.cs @@ -0,0 +1,133 @@ +using System; + +namespace MongoDB.Commands +{ + /// + /// A fluent interface for executing a Map/Reduce call against a collection. + /// + public class MapReduceCommand + { + internal Document Command { get; private set; } + + /// + /// Initializes a new instance of the class. + /// + /// The name. + internal MapReduceCommand(string name) + { + Command = new Document("mapreduce", name); + Verbose = true; + } + + /// + /// Gets the name. + /// + /// The name. + public string Name + { + get { return (String)Command["mapreduce"]; } + } + + /// + /// The map function references the variable this to inspect the current object under consideration. + /// A map function must call emit(key,value) at least once, but may be invoked any number of times, + /// as may be appropriate. + /// + public Code Map + { + get { return (Code)Command["map"]; } + set { Command["map"] = value; } + } + + /// + /// The reduce function receives a key and an array of values. To use, reduce the received values, + /// and return a result. + /// + /// + /// The MapReduce engine may invoke reduce functions iteratively; thus, these functions + /// must be idempotent. If you need to perform an operation only once, use a finalize function. + /// + public Code Reduce + { + get { return (Code)Command["reduce"]; } + set { Command["reduce"] = value; } + } + + /// + /// Gets or sets the query. + /// + /// The query. + public Document Query + { + get { return (Document)Command["query"]; } + set { Command["query"] = value; } + } + + /// + /// Sort the query. Useful for optimization + /// + public Document Sort + { + get { return (Document)Command["sort"]; } + set { Command["sort"] = value; } + } + + /// + /// Number of objects to return from collection + /// + public long Limit + { + get { return (long)Command["limit"]; } + set { Command["limit"] = value; } + } + + /// + /// Name of the final collection the results should be stored in. + /// + /// + /// A temporary collection is still used and then renamed to the target name atomically. + /// + public string Out + { + get { return (string)Command["out"]; } + set { Command["out"] = value; } + } + + /// + /// When true the generated collection is not treated as temporary. 
Specifying out automatically makes + /// the collection permanent + /// + public bool KeepTemp + { + get { return Convert.ToBoolean(Command["keeptemp"]); } + set { Command["keeptemp"] = value; } + } + + /// + /// Provides statistics on job execution time. + /// + public bool Verbose + { + get { return (bool)Command["verbose"]; } + set { Command["verbose"] = value; } + } + + /// + /// Function to apply to all the results when finished. + /// + public Code Finalize + { + get { return (Code)Command["finalize"]; } + set { Command["finalize"] = value; } + } + + /// + /// Document where fields go into javascript global scope + /// + public Document Scope + { + get { return (Document)Command["scope"]; } + set { Command["scope"] = value; } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Builders/AutoMappingProfileBuilder.cs b/source/MongoDB/Configuration/Builders/AutoMappingProfileBuilder.cs new file mode 100644 index 00000000..f1ece0f0 --- /dev/null +++ b/source/MongoDB/Configuration/Builders/AutoMappingProfileBuilder.cs @@ -0,0 +1,342 @@ +using System; +using System.Reflection; + +using MongoDB.Configuration.Mapping.Auto; +using MongoDB.Configuration.Mapping.Conventions; +using MongoDB.Util; + +namespace MongoDB.Configuration.Builders +{ + /// + /// + /// + public class AutoMappingProfileBuilder + { + private readonly AutoMappingProfile _profile; + + /// + /// Initializes a new instance of the class. + /// + /// The profile. + internal AutoMappingProfileBuilder(AutoMappingProfile profile) + { + if (profile == null) + throw new ArgumentNullException("profile"); + + _profile = profile; + } + + /// + /// Aliaseses the are camel cased. + /// + /// + public AutoMappingProfileBuilder AliasesAreCamelCased() + { + _profile.Conventions.AliasConvention = new DelegateAliasConvention(m => Inflector.ToCamelCase(m.Name)); + return this; + } + + /// + /// Aliaseses the are. + /// + /// The alias. + /// + public AutoMappingProfileBuilder AliasesAre(Func alias) + { + _profile.Conventions.AliasConvention = new DelegateAliasConvention(alias); + return this; + } + + /// + /// Collectionses the are named. + /// + /// Name of the collection. + /// + public AutoMappingProfileBuilder CollectionsAreNamed(Func collectionName) + { + _profile.Conventions.CollectionNameConvention = new DelegateCollectionNameConvention(collectionName); + return this; + } + + /// + /// Collections the names are camel cased. + /// + /// + public AutoMappingProfileBuilder CollectionNamesAreCamelCased() + { + _profile.Conventions.CollectionNameConvention = new DelegateCollectionNameConvention(t => Inflector.ToCamelCase(t.Name)); + return this; + } + + /// + /// Collections the names are camel cased and plural. + /// + /// + public AutoMappingProfileBuilder CollectionNamesAreCamelCasedAndPlural() + { + _profile.Conventions.CollectionNameConvention = new DelegateCollectionNameConvention(t => Inflector.MakePlural(Inflector.ToCamelCase(t.Name))); + return this; + } + + /// + /// Conventionses the are. + /// + /// The conventions. + /// + public AutoMappingProfileBuilder ConventionsAre(ConventionProfile conventions) + { + _profile.Conventions = conventions; + return this; + } + + /// + /// Discriminators the aliases are. + /// + /// The discriminator alias. 
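// ---- Editor's illustrative sketch (not part of this change set) ----
// Refers to the MapReduceCommand introduced above: the fluent properties simply populate the
// underlying "mapreduce" command Document. The command instance (the constructor is internal)
// and a Code constructor taking JavaScript text are assumed here; how the command is sent to the
// server is not shown in this change.
mapReduceCommand.Map = new Code("function() { emit(this.Category, 1); }");
mapReduceCommand.Reduce = new Code("function(key, values) { var sum = 0; values.forEach(function(v) { sum += v; }); return sum; }");
mapReduceCommand.Query = new Document().Add("InStock", true);
mapReduceCommand.Out = "category_counts";       // results kept permanently under this collection name
// ---- end sketch ----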
+ /// + public AutoMappingProfileBuilder DiscriminatorAliasesAre(Func discriminatorAlias) + { + _profile.Conventions.DiscriminatorAliasConvention = new DelegateDiscriminatorAliasConvention(discriminatorAlias); + return this; + } + + /// + /// Discriminators the values are. + /// + /// The discriminator. + /// + public AutoMappingProfileBuilder DiscriminatorValuesAre(Func discriminator) + { + _profile.Conventions.DiscriminatorConvention = new DelegateDiscriminatorConvention(discriminator); + return this; + } + + /// + /// Extendeds the properties are. + /// + /// The extended property. + /// + public AutoMappingProfileBuilder ExtendedPropertiesAre(Func extendedProperty) + { + _profile.Conventions.ExtendedPropertiesConvention = new DelegateExtendedPropertiesConvention(extendedProperty); + return this; + } + + /// + /// Extendeds the properties are. + /// + /// The extended property. + /// The member types. + /// The binding flags. + /// + public AutoMappingProfileBuilder ExtendedPropertiesAre(Func extendedProperty, MemberTypes memberTypes, BindingFlags bindingFlags) + { + _profile.Conventions.ExtendedPropertiesConvention = new DelegateExtendedPropertiesConvention(extendedProperty, memberTypes, bindingFlags); + return this; + } + + /// + /// Extendeds the properties are named. + /// + /// The name. + /// + public AutoMappingProfileBuilder ExtendedPropertiesAreNamed(string name) + { + _profile.Conventions.ExtendedPropertiesConvention = new DelegateExtendedPropertiesConvention(m => m.Name == name); + return this; + } + + /// + /// Extendeds the properties are named. + /// + /// The name. + /// The member types. + /// The binding flags. + /// + public AutoMappingProfileBuilder ExtendedPropertiesAreNamed(string name, MemberTypes memberTypes, BindingFlags bindingFlags) + { + _profile.Conventions.ExtendedPropertiesConvention = new DelegateExtendedPropertiesConvention(m => m.Name == name, memberTypes, bindingFlags); + return this; + } + + /// + /// Finds the members with. + /// + /// The member finder. + /// + public AutoMappingProfileBuilder FindMembersWith(IMemberFinder memberFinder) + { + _profile.MemberFinder = memberFinder; + return this; + } + + /// + /// Idses the are. + /// + /// The id. + /// + public AutoMappingProfileBuilder IdsAre(Func id) + { + _profile.Conventions.IdConvention = new DelegateIdConvention(id); + return this; + } + + /// + /// Idses the are. + /// + /// The id. + /// The member types. + /// The binding flags. + /// + public AutoMappingProfileBuilder IdsAre(Func id, MemberTypes memberTypes, BindingFlags bindingFlags) + { + _profile.Conventions.IdConvention = new DelegateIdConvention(id, memberTypes, bindingFlags); + return this; + } + + /// + /// Idses the are named. + /// + /// The name. + /// + public AutoMappingProfileBuilder IdsAreNamed(string name) + { + _profile.Conventions.IdConvention = new DelegateIdConvention(m => m.Name == name); + return this; + } + + /// + /// Idses the are named. + /// + /// The name. + /// The member types. + /// The binding flags. + /// + public AutoMappingProfileBuilder IdsAreNamed(string name, MemberTypes memberTypes, BindingFlags bindingFlags) + { + _profile.Conventions.IdConvention = new DelegateIdConvention(m => m.Name == name, memberTypes, bindingFlags); + return this; + } + + /// + /// Subs the classes are. + /// + /// The is sub class. + /// + public AutoMappingProfileBuilder SubClassesAre(Func isSubClass) + { + _profile.IsSubClassDelegate = isSubClass; + return this; + } + + /// + /// Uses the collection adapter convention. 
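// ---- Editor's illustrative sketch (not part of this change set) ----
// Typical use of the fluent profile builder. Its constructor is internal, so it is reached through
// a configuration callback such as MappingStoreBuilder.DefaultProfile (shown later in this change);
// `profile` is the AutoMappingProfileBuilder passed to that callback.
profile.AliasesAreCamelCased()
       .CollectionNamesAreCamelCasedAndPlural()
       .IdsAreNamed("Id")
       .SubClassesAre(t => t.BaseType != null && t.BaseType != typeof(object));
// ---- end sketch ----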
+ /// + /// The collection adapter convention. + /// + public AutoMappingProfileBuilder UseCollectionAdapterConvention(ICollectionAdapterConvention collectionAdapterConvention) + { + _profile.Conventions.CollectionAdapterConvention = collectionAdapterConvention; + return this; + } + + /// + /// Uses the collection name convention. + /// + /// The collection name convention. + /// + public AutoMappingProfileBuilder UseCollectionNameConvention(ICollectionNameConvention collectionNameConvention) + { + _profile.Conventions.CollectionNameConvention = collectionNameConvention; + return this; + } + + /// + /// Uses the default value convention. + /// + /// The default value convention. + /// + public AutoMappingProfileBuilder UseDefaultValueConvention(IDefaultValueConvention defaultValueConvention) + { + _profile.Conventions.DefaultValueConvention = defaultValueConvention; + return this; + } + + /// + /// Uses the discriminator alias convention. + /// + /// The discriminator alias convention. + /// + public AutoMappingProfileBuilder UseDiscriminatorAliasConvention(IDiscriminatorAliasConvention discriminatorAliasConvention) + { + _profile.Conventions.DiscriminatorAliasConvention = discriminatorAliasConvention; + return this; + } + + /// + /// Uses the discriminator convention. + /// + /// The discriminator convention. + /// + public AutoMappingProfileBuilder UseDiscriminatorConvention(IDiscriminatorConvention discriminatorConvention) + { + _profile.Conventions.DiscriminatorConvention = discriminatorConvention; + return this; + } + + /// + /// Uses the extended properties convention. + /// + /// The extended properties convention. + /// + public AutoMappingProfileBuilder UseExtendedPropertiesConvention(IExtendedPropertiesConvention extendedPropertiesConvention) + { + _profile.Conventions.ExtendedPropertiesConvention = extendedPropertiesConvention; + return this; + } + + /// + /// Uses the id convention. + /// + /// The id convention. + /// + public AutoMappingProfileBuilder UseIdConvention(IIdConvention idConvention) + { + _profile.Conventions.IdConvention = idConvention; + return this; + } + + /// + /// Uses the id generator convention. + /// + /// The id generator convention. + /// + public AutoMappingProfileBuilder UseIdGeneratorConvention(IIdGeneratorConvention idGeneratorConvention) + { + _profile.Conventions.IdGeneratorConvention = idGeneratorConvention; + return this; + } + + /// + /// Uses the id unsaved value convention. + /// + /// The id unsaved value convention. + /// + public AutoMappingProfileBuilder UseIdUnsavedValueConvention(IIdUnsavedValueConvention idUnsavedValueConvention) + { + _profile.Conventions.IdUnsavedValueConvention = idUnsavedValueConvention; + return this; + } + + /// + /// Uses the member alias convention. + /// + /// The alias convention. 
+ /// + public AutoMappingProfileBuilder UseMemberAliasConvention(IAliasConvention aliasConvention) + { + _profile.Conventions.AliasConvention = aliasConvention; + return this; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Builders/ClassOverridesBuilder.cs b/source/MongoDB/Configuration/Builders/ClassOverridesBuilder.cs new file mode 100644 index 00000000..e6acddc4 --- /dev/null +++ b/source/MongoDB/Configuration/Builders/ClassOverridesBuilder.cs @@ -0,0 +1,131 @@ +using System; +using MongoDB.Configuration.Mapping.Auto; +using System.Linq.Expressions; +using System.Reflection; + +namespace MongoDB.Configuration.Builders +{ + /// + /// + /// + /// + public class ClassOverridesBuilder + { + private readonly ClassOverrides _overrides; + + /// + /// Initializes a new instance of the class. + /// + /// The overrides. + internal ClassOverridesBuilder(ClassOverrides overrides) + { + if (overrides == null) + throw new ArgumentNullException("overrides"); + + _overrides = overrides; + } + + /// + /// Collections the name. + /// + /// The name. + public void CollectionName(string name) + { + _overrides.CollectionName = name; + } + + /// + /// Ids the specified member. + /// + /// The member. + /// + public IdOverridesBuilder Id(MemberInfo member) + { + var id = new IdOverrides { Member = member }; + _overrides.IdOverrides = id; + return new IdOverridesBuilder(id); + } + + /// + /// Ids the specified name. + /// + /// The name. + /// + public IdOverridesBuilder Id(string name) + { + var members = typeof(T).GetMember(name, MemberTypes.Field | MemberTypes.Property, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (members == null || members.Length == 0) + throw new InvalidOperationException("No member was found."); + if (members.Length > 1) + throw new InvalidOperationException("More than one member matched the specified name."); + + return Id(members[0]); + } + + /// + /// Ids the specified member. + /// + /// The member. + /// + public IdOverridesBuilder Id(Expression> member) + { + var mex = GetMemberExpression(member); + return Id(mex.Member.Name); + } + + /// + /// Members the specified member. + /// + /// The member. + /// + public MemberOverridesBuilder Member(MemberInfo member) + { + var overrides = _overrides.GetOverridesFor(member); + return new MemberOverridesBuilder(overrides); + } + + /// + /// Members the specified name. + /// + /// The name. + /// + public MemberOverridesBuilder Member(string name) + { + var members = typeof(T).GetMember(name, MemberTypes.Field | MemberTypes.Property, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (members == null || members.Length == 0) + throw new InvalidOperationException("No member was found."); + if (members.Length > 1) + throw new InvalidOperationException("More than one member matched the specified name."); + + return Member(members[0]); + } + + /// + /// Members the specified member. + /// + /// The member. + /// + public MemberOverridesBuilder Member(Expression> member) + { + var mex = GetMemberExpression(member); + return Member(mex.Member.Name); + } + + /// + /// Gets the member expression. + /// + /// The member. 
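// ---- Editor's illustrative sketch (not part of this change set) ----
// Per-class overrides expressed through this builder. It is reached via MappingStoreBuilder.Map<T>
// (shown later in this change); `builder` is a MappingStoreBuilder, and `Account` with its
// `DisplayName` member is a hypothetical document type.
builder.Map<Account>(o =>
{
    o.CollectionName("accounts");
    o.Member("DisplayName").Alias("displayName").DefaultValue(string.Empty);
});
// ---- end sketch ----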
+ /// + private MemberExpression GetMemberExpression(Expression> member) + { + var memberExpression = member.Body as MemberExpression; + if (memberExpression == null) + { + var unaryExpression = member.Body as UnaryExpression; + memberExpression = unaryExpression.Operand as MemberExpression; + } + + return memberExpression; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Builders/IdOverridesBuilder.cs b/source/MongoDB/Configuration/Builders/IdOverridesBuilder.cs new file mode 100644 index 00000000..0277a5b9 --- /dev/null +++ b/source/MongoDB/Configuration/Builders/IdOverridesBuilder.cs @@ -0,0 +1,58 @@ +using System; +using MongoDB.Configuration.Mapping.Auto; +using MongoDB.Configuration.IdGenerators; + +namespace MongoDB.Configuration.Builders +{ + /// + /// + /// + public class IdOverridesBuilder + { + private readonly IdOverrides _overrides; + + /// + /// Initializes a new instance of the class. + /// + /// The overrides. + internal IdOverridesBuilder(IdOverrides overrides) + { + if (overrides == null) + throw new ArgumentNullException("overrides"); + + _overrides = overrides; + } + + /// + /// Generateds the by. + /// + /// + /// + public IdOverridesBuilder GeneratedBy() where T : IIdGenerator, new() + { + return GeneratedBy(new T()); + } + + /// + /// Generateds the by. + /// + /// The generator. + /// + public IdOverridesBuilder GeneratedBy(IIdGenerator generator) + { + _overrides.Generator = generator; + return this; + } + + /// + /// Unsaveds the value. + /// + /// The unsaved value. + /// + public IdOverridesBuilder UnsavedValue(object unsavedValue) + { + _overrides.UnsavedValue = unsavedValue; + return this; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Builders/MappingStoreBuilder.cs b/source/MongoDB/Configuration/Builders/MappingStoreBuilder.cs new file mode 100644 index 00000000..257e89e5 --- /dev/null +++ b/source/MongoDB/Configuration/Builders/MappingStoreBuilder.cs @@ -0,0 +1,152 @@ +using System; +using System.Collections.Generic; +using MongoDB.Configuration.Mapping.Auto; +using MongoDB.Configuration.Mapping; + +namespace MongoDB.Configuration.Builders +{ + /// + /// + /// + public class MappingStoreBuilder + { + private IAutoMappingProfile _defaultProfile; + private readonly List _eagerMapTypes; + private readonly ClassOverridesMap _overrides; + private readonly List _profiles; + + /// + /// Initializes a new instance of the class. + /// + public MappingStoreBuilder() + { + _eagerMapTypes = new List(); + _overrides = new ClassOverridesMap(); + _profiles = new List(); + } + + /// + /// Gets the mapping store. + /// + /// + public IMappingStore BuildMappingStore() + { + IAutoMapper autoMapper; + if (_profiles.Count > 0) + { + var agg = new AggregateAutoMapper(); + foreach (var p in _profiles) + agg.AddAutoMapper(new AutoMapper(CreateOverrideableProfile(p.Profile), p.Filter)); + + agg.AddAutoMapper(new AutoMapper(CreateOverrideableProfile(_defaultProfile ?? new AutoMappingProfile()))); + autoMapper = agg; + } + else + autoMapper = new AutoMapper(CreateOverrideableProfile(_defaultProfile ?? new AutoMappingProfile())); + + var store = new AutoMappingStore(autoMapper); + + foreach (var type in _eagerMapTypes) + store.GetClassMap(type); + + return store; + } + + /// + /// Configures the default profile. + /// + /// The config. + public void DefaultProfile(Action config) + { + if (config == null) + throw new ArgumentNullException("config"); + + var dp = _defaultProfile as AutoMappingProfile ?? 
new AutoMappingProfile(); + + config(new AutoMappingProfileBuilder(dp)); + _defaultProfile = dp; + } + + /// + /// Configures the default profile. + /// + /// The default profile. + public void DefaultProfile(IAutoMappingProfile defaultProfile) + { + if (defaultProfile == null) + throw new ArgumentNullException("defaultProfile"); + + _defaultProfile = defaultProfile; + } + + /// + /// Configures a custom profile. + /// + /// The filter. + /// The config. + public void CustomProfile(Func filter, Action config) + { + if (config == null) + throw new ArgumentNullException("config"); + + var p = new AutoMappingProfile(); + config(new AutoMappingProfileBuilder(p)); + CustomProfile(filter, p); + } + + /// + /// Adds a custom profile. + /// + /// The filter. + /// The profile. + public void CustomProfile(Func filter, IAutoMappingProfile profile) + { + if (filter == null) + throw new ArgumentNullException("filter"); + if (profile == null) + throw new ArgumentNullException("profile"); + + _profiles.Add(new FilteredProfile { Filter = filter, Profile = profile }); + } + + /// + /// Maps this instance. + /// + /// + public void Map() + { + _eagerMapTypes.Add(typeof(T)); + } + + /// + /// Maps the specified config. + /// + /// + /// The config. + public void Map(Action> config) + { + var c = new ClassOverridesBuilder(_overrides.GetOverridesForType(typeof(T))); + config(c); + Map(); + } + + /// + /// Creates the overrideable profile. + /// + /// The profile. + /// + private IAutoMappingProfile CreateOverrideableProfile(IAutoMappingProfile profile) + { + return new OverridableAutoMappingProfile(profile, _overrides); + } + + /// + /// + /// + private class FilteredProfile + { + public Func Filter; + public IAutoMappingProfile Profile; + } + } +} diff --git a/source/MongoDB/Configuration/Builders/MemberOverridesBuilder.cs b/source/MongoDB/Configuration/Builders/MemberOverridesBuilder.cs new file mode 100644 index 00000000..ef551933 --- /dev/null +++ b/source/MongoDB/Configuration/Builders/MemberOverridesBuilder.cs @@ -0,0 +1,67 @@ +using System; +using MongoDB.Configuration.Mapping.Auto; + +namespace MongoDB.Configuration.Builders +{ + /// + /// + /// + public class MemberOverridesBuilder + { + private readonly MemberOverrides _overrides; + + /// + /// Initializes a new instance of the class. + /// + /// The overrides. + internal MemberOverridesBuilder(MemberOverrides overrides) + { + if (overrides == null) + throw new ArgumentNullException("overrides"); + + _overrides = overrides; + } + + /// + /// Aliases the specified name. + /// + /// The name. + /// + public MemberOverridesBuilder Alias(string name) + { + _overrides.Alias = name; + return this; + } + + /// + /// Defaults the value. + /// + /// The default value. + /// + public MemberOverridesBuilder DefaultValue(object defaultValue) + { + _overrides.DefaultValue = defaultValue; + return this; + } + + /// + /// Ignores this instance. + /// + /// + public MemberOverridesBuilder Ignore() + { + _overrides.Ignore = true; + return this; + } + + /// + /// Persists the default value. 
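// ---- Editor's illustrative sketch (not part of this change set) ----
// End-to-end use of MappingStoreBuilder: a default profile, per-class overrides, and an eagerly
// mapped type, producing the IMappingStore consumed by the mapping infrastructure. `Person` and
// `Audit` are hypothetical document classes.
var mapping = new MappingStoreBuilder();
mapping.DefaultProfile(p => p.CollectionNamesAreCamelCasedAndPlural());
mapping.Map<Person>(o => o.CollectionName("people"));
mapping.Map<Audit>();                           // mapped eagerly with the default profile
IMappingStore store = mapping.BuildMappingStore();
// ---- end sketch ----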
+ /// + /// + public MemberOverridesBuilder PersistDefaultValue() + { + _overrides.PersistDefaultValue = true; + return this; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/CollectionAdapters/ArrayCollectionAdapter.cs b/source/MongoDB/Configuration/CollectionAdapters/ArrayCollectionAdapter.cs new file mode 100644 index 00000000..d658e27e --- /dev/null +++ b/source/MongoDB/Configuration/CollectionAdapters/ArrayCollectionAdapter.cs @@ -0,0 +1,33 @@ +using System; +using System.Collections; +using MongoDB.Configuration.Mapping.Util; + +namespace MongoDB.Configuration.CollectionAdapters +{ + /// + /// + /// + public class ArrayCollectionAdapter : ICollectionAdapter + { + /// + /// Adds the element to instance. + /// + /// Type of the element. + /// The elements. + /// + public object CreateCollection(Type elementType, object[] elements) + { + return ValueConverter.ConvertArray(elements, elementType); + } + + /// + /// Gets the elements from collection. + /// + /// The collection. + /// + public IEnumerable GetElementsFromCollection(object collection) + { + return (IEnumerable)collection; + } + } +} diff --git a/source/MongoDB/Configuration/CollectionAdapters/ArrayListCollectionAdapter.cs b/source/MongoDB/Configuration/CollectionAdapters/ArrayListCollectionAdapter.cs new file mode 100644 index 00000000..9352a926 --- /dev/null +++ b/source/MongoDB/Configuration/CollectionAdapters/ArrayListCollectionAdapter.cs @@ -0,0 +1,32 @@ +using System; +using System.Collections; + +namespace MongoDB.Configuration.CollectionAdapters +{ + /// + /// + /// + public class ArrayListCollectionAdapter : ICollectionAdapter + { + /// + /// Adds the element to instance. + /// + /// Type of the element. + /// The elements. + /// + public object CreateCollection(Type elementType, object[] elements) + { + return new ArrayList(elements); + } + + /// + /// Gets the elements from collection. + /// + /// The collection. + /// + public IEnumerable GetElementsFromCollection(object collection) + { + return (IEnumerable)collection; + } + } +} diff --git a/source/MongoDB/Configuration/CollectionAdapters/GenericListCollectionAdapter.cs b/source/MongoDB/Configuration/CollectionAdapters/GenericListCollectionAdapter.cs new file mode 100644 index 00000000..a10d718f --- /dev/null +++ b/source/MongoDB/Configuration/CollectionAdapters/GenericListCollectionAdapter.cs @@ -0,0 +1,38 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using MongoDB.Configuration.Mapping.Util; + +namespace MongoDB.Configuration.CollectionAdapters +{ + /// + /// + /// + public class GenericListCollectionAdapter : ICollectionAdapter + { + static readonly Type OpenListType = typeof(List<>); + + /// + /// Adds the element to instance. + /// + /// Type of the element. + /// The elements. + /// + public object CreateCollection(Type elementType, object[] elements) + { + var closedListType = OpenListType.MakeGenericType(elementType); + var typedElements = ValueConverter.ConvertArray(elements, elementType); + return Activator.CreateInstance(closedListType, typedElements); + } + + /// + /// Gets the elements from collection. + /// + /// The collection. 
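// ---- Editor's illustrative sketch (not part of this change set) ----
// The adapter contract is a round trip between a BSON array (object[]) and a concrete collection.
// GenericListCollectionAdapter closes List<> over the element type and converts the elements first.
ICollectionAdapter adapter = new GenericListCollectionAdapter();
var numbers = (List<int>)adapter.CreateCollection(typeof(int), new object[] { 1, 2, 3 });
foreach (var element in adapter.GetElementsFromCollection(numbers))
    Console.WriteLine(element);                 // 1, 2, 3
// ---- end sketch ----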
+ /// + public IEnumerable GetElementsFromCollection(object collection) + { + return (IEnumerable)collection; + } + } +} diff --git a/source/MongoDB/Configuration/CollectionAdapters/GenericSetCollectionAdapter.cs b/source/MongoDB/Configuration/CollectionAdapters/GenericSetCollectionAdapter.cs new file mode 100644 index 00000000..7db51c9f --- /dev/null +++ b/source/MongoDB/Configuration/CollectionAdapters/GenericSetCollectionAdapter.cs @@ -0,0 +1,38 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using MongoDB.Configuration.Mapping.Util; + +namespace MongoDB.Configuration.CollectionAdapters +{ + /// + /// + /// + public class GenericSetCollectionAdapter : ICollectionAdapter + { + static readonly Type OpenSetType = typeof(HashSet<>); + + /// + /// Adds the element to instance. + /// + /// Type of the element. + /// The elements. + /// + public object CreateCollection(Type elementType, object[] elements) + { + var closedSetType = OpenSetType.MakeGenericType(elementType); + var typedElements = ValueConverter.ConvertArray(elements,elementType); + return Activator.CreateInstance(closedSetType, new[] { typedElements }); + } + + /// + /// Gets the elements from collection. + /// + /// The collection. + /// + public IEnumerable GetElementsFromCollection(object collection) + { + return (IEnumerable)collection; + } + } +} diff --git a/source/MongoDB/Configuration/CollectionAdapters/ICollectionAdapter.cs b/source/MongoDB/Configuration/CollectionAdapters/ICollectionAdapter.cs new file mode 100644 index 00000000..5b77c659 --- /dev/null +++ b/source/MongoDB/Configuration/CollectionAdapters/ICollectionAdapter.cs @@ -0,0 +1,26 @@ +using System; +using System.Collections; + +namespace MongoDB.Configuration.CollectionAdapters +{ + /// + /// + /// + public interface ICollectionAdapter + { + /// + /// Adds the element to instance. + /// + /// Type of the element. + /// The elements. + /// + object CreateCollection(Type elementType, object[] elements); + + /// + /// Gets the elements from collection. + /// + /// The collection. + /// + IEnumerable GetElementsFromCollection(object collection); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/DictionaryAdapters/GenericDictionaryDictionaryAdapter.cs b/source/MongoDB/Configuration/DictionaryAdapters/GenericDictionaryDictionaryAdapter.cs new file mode 100644 index 00000000..b2cc1e7b --- /dev/null +++ b/source/MongoDB/Configuration/DictionaryAdapters/GenericDictionaryDictionaryAdapter.cs @@ -0,0 +1,65 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Configuration.Mapping.Util; + +namespace MongoDB.Configuration.DictionaryAdapters +{ + /// + /// + /// + public class GenericDictionaryDictionaryAdapter : IDictionaryAdapter + { + /// + /// Gets the type of the key. + /// + /// The type of the key. + public Type KeyType + { + get { return typeof(TKey); } + } + + /// + /// Gets the type of the value. + /// + /// The type of the value. + public Type ValueType + { + get { return typeof(TValue); } + } + + /// + /// Creates the dictionary. + /// + /// The document. + /// + public object CreateDictionary(Document document) + { + if(document==null) + return null; + + return document.ToDictionary(pair => (TKey)ValueConverter.Convert(pair.Key, typeof(TKey)), pair => (TValue)pair.Value); + } + + /// + /// Gets the pairs. + /// + /// The collection. 
+ /// + public Document GetDocument(object dictionary) + { + var instance = dictionary as IDictionary; + + if (instance == null) + return null; + + var doc = new Document(); + + foreach (var e in instance) + doc.Add(ValueConverter.ConvertKey(e.Key), e.Value); + + return doc; + } + + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/DictionaryAdapters/GenericSortedListDictionaryAdapter.cs b/source/MongoDB/Configuration/DictionaryAdapters/GenericSortedListDictionaryAdapter.cs new file mode 100644 index 00000000..2f971776 --- /dev/null +++ b/source/MongoDB/Configuration/DictionaryAdapters/GenericSortedListDictionaryAdapter.cs @@ -0,0 +1,68 @@ +using System; +using System.Collections.Generic; +using MongoDB.Configuration.Mapping.Util; + +namespace MongoDB.Configuration.DictionaryAdapters +{ + /// + /// + /// + public class GenericSortedListDictionaryAdapter : IDictionaryAdapter + { + /// + /// Gets the type of the key. + /// + /// The type of the key. + public Type KeyType + { + get { return typeof(TKey); } + } + + /// + /// Gets the type of the value. + /// + /// The type of the value. + public Type ValueType + { + get { return typeof(TValue); } + } + + /// + /// Creates the dictionary. + /// + /// The document. + /// + public object CreateDictionary(Document document) + { + if(document == null) + return null; + + var list = new SortedList(); + + foreach(var pair in document) + list.Add((TKey)ValueConverter.Convert(pair.Key, typeof(TKey)), (TValue)pair.Value); + + return list; + } + + /// + /// Gets the pairs. + /// + /// The collection. + /// + public Document GetDocument(object dictionary) + { + var instance = dictionary as IDictionary; + + if (instance == null) + return null; + + var doc = new Document(); + + foreach (var e in instance) + doc.Add(ValueConverter.ConvertKey(e.Key), e.Value); + + return doc; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/DictionaryAdapters/HashtableDictionaryAdapter.cs b/source/MongoDB/Configuration/DictionaryAdapters/HashtableDictionaryAdapter.cs new file mode 100644 index 00000000..fecad297 --- /dev/null +++ b/source/MongoDB/Configuration/DictionaryAdapters/HashtableDictionaryAdapter.cs @@ -0,0 +1,66 @@ +using System; +using System.Collections; +using MongoDB.Configuration.Mapping.Util; + +namespace MongoDB.Configuration.DictionaryAdapters +{ + /// + /// + /// + public class HashtableDictionaryAdapter : IDictionaryAdapter + { + /// + /// Gets the type of the key. + /// + /// The type of the key. + public Type KeyType + { + get { return typeof(object); } + } + + /// + /// Gets the type of the value. + /// + /// The type of the value. + public Type ValueType + { + get { return typeof(object); } + } + + /// + /// Creates the dictionary. + /// + /// The document. + /// + public object CreateDictionary(Document document) + { + if(document == null) + return null; + + var hashtable = new Hashtable(); + + foreach (var pair in document) + hashtable.Add(pair.Key, pair.Value); + + return hashtable; + } + + /// + /// Gets the document. + /// + /// The collection. 
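// ---- Editor's illustrative sketch (not part of this change set) ----
// Dictionary adapters round-trip between a Document and a concrete dictionary type.
IDictionaryAdapter adapter = new HashtableDictionaryAdapter();
var table = (Hashtable)adapter.CreateDictionary(new Document().Add("en", "hello").Add("fr", "bonjour"));
Document roundTripped = adapter.GetDocument(table);   // same pairs, keys normalized by ValueConverter.ConvertKey
// ---- end sketch ----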
+ /// + public Document GetDocument(object collection) + { + var hashtable = collection as Hashtable; + if (hashtable == null) + return new Document(); + + var doc = new Document(); + foreach (DictionaryEntry entry in hashtable) + doc.Add(ValueConverter.ConvertKey(entry.Key), entry.Value); + + return doc; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/DictionaryAdapters/IDictionaryAdapter.cs b/source/MongoDB/Configuration/DictionaryAdapters/IDictionaryAdapter.cs new file mode 100644 index 00000000..eaab56b1 --- /dev/null +++ b/source/MongoDB/Configuration/DictionaryAdapters/IDictionaryAdapter.cs @@ -0,0 +1,36 @@ +using System; + +namespace MongoDB.Configuration.DictionaryAdapters +{ + /// + /// + /// + public interface IDictionaryAdapter + { + /// + /// Gets the type of the key. + /// + /// The type of the key. + Type KeyType { get; } + + /// + /// Gets the type of the value. + /// + /// The type of the value. + Type ValueType { get; } + + /// + /// Creates the dictionary. + /// + /// The document. + /// + object CreateDictionary(Document document); + + /// + /// Gets the document. + /// + /// The dictionary. + /// + Document GetDocument(object dictionary); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/IdGenerators/AssignedIdGenerator.cs b/source/MongoDB/Configuration/IdGenerators/AssignedIdGenerator.cs new file mode 100644 index 00000000..8b3ba3f2 --- /dev/null +++ b/source/MongoDB/Configuration/IdGenerators/AssignedIdGenerator.cs @@ -0,0 +1,26 @@ +using MongoDB.Configuration.Mapping.Model; + +namespace MongoDB.Configuration.IdGenerators +{ + /// + /// + /// + public class AssignedIdGenerator : IIdGenerator + { + /// + /// Generates an id for the specified entity. + /// + /// The entity. + /// The id map. + /// + public object Generate(object entity, IdMap idMap) + { + var id = idMap.GetValue(entity); + + if (Equals(id, idMap.UnsavedValue)) + throw new IdGenerationException(string.Format("Ids for {0} must be manually assigned before saving.", entity.GetType())); + + return id; + } + } +} diff --git a/source/MongoDB/Configuration/IdGenerators/GuidCombGenerator.cs b/source/MongoDB/Configuration/IdGenerators/GuidCombGenerator.cs new file mode 100644 index 00000000..b98ac755 --- /dev/null +++ b/source/MongoDB/Configuration/IdGenerators/GuidCombGenerator.cs @@ -0,0 +1,48 @@ +using System; + +using MongoDB.Configuration.Mapping.Model; + +namespace MongoDB.Configuration.IdGenerators +{ + /// + /// + /// + public class GuidCombGenerator : IIdGenerator + { + /// + /// Generates an id for the specified entity. + /// + /// The entity. + /// The id map. + /// + /// + /// This code was taken from NHibernate. 
+ /// + public object Generate(object entity, IdMap idMap) + { + var guidArray = Guid.NewGuid().ToByteArray(); + + var baseDate = new DateTime(1900, 1, 1); + var now = DateTime.Now; + + // Get the days and milliseconds which will be used to build the byte string + var days = new TimeSpan(now.Ticks - baseDate.Ticks); + var msecs = now.TimeOfDay; + + // Convert to a byte array + // Note that SQL Server is accurate to 1/300th of a millisecond so we divide by 3.333333 + var daysArray = BitConverter.GetBytes(days.Days); + var msecsArray = BitConverter.GetBytes((long)(msecs.TotalMilliseconds / 3.333333)); + + // Reverse the bytes to match SQL Servers ordering + Array.Reverse(daysArray); + Array.Reverse(msecsArray); + + // Copy the bytes into the guid + Array.Copy(daysArray, daysArray.Length - 2, guidArray, guidArray.Length - 6, 2); + Array.Copy(msecsArray, msecsArray.Length - 4, guidArray, guidArray.Length - 4, 4); + + return new Guid(guidArray); + } + } +} diff --git a/source/MongoDB/Configuration/IdGenerators/IIdGenerator.cs b/source/MongoDB/Configuration/IdGenerators/IIdGenerator.cs new file mode 100644 index 00000000..74d63c58 --- /dev/null +++ b/source/MongoDB/Configuration/IdGenerators/IIdGenerator.cs @@ -0,0 +1,18 @@ +using MongoDB.Configuration.Mapping.Model; + +namespace MongoDB.Configuration.IdGenerators +{ + /// + /// + /// + public interface IIdGenerator + { + /// + /// Generates an id for the specified entity. + /// + /// The entity. + /// The id map. + /// + object Generate(object entity, IdMap idMap); + } +} diff --git a/source/MongoDB/Configuration/IdGenerators/OidGenerator.cs b/source/MongoDB/Configuration/IdGenerators/OidGenerator.cs new file mode 100644 index 00000000..58490317 --- /dev/null +++ b/source/MongoDB/Configuration/IdGenerators/OidGenerator.cs @@ -0,0 +1,20 @@ +using MongoDB.Configuration.Mapping.Model; + +namespace MongoDB.Configuration.IdGenerators +{ + /// + /// + public class OidGenerator : IIdGenerator + { + /// + /// Generates an id for the specified entity. + /// + /// The entity. + /// The id map. + /// + public object Generate(object entity, IdMap idMap) + { + return Oid.NewOid(); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Auto/AggregateAutoMapper.cs b/source/MongoDB/Configuration/Mapping/Auto/AggregateAutoMapper.cs new file mode 100644 index 00000000..3ece8c08 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/AggregateAutoMapper.cs @@ -0,0 +1,53 @@ +using System; +using System.Collections.Generic; + +using MongoDB.Configuration.Mapping.Model; + +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// + /// + public class AggregateAutoMapper : IAutoMapper + { + private readonly List _autoMappers; + + /// + /// Initializes a new instance of the class. + /// + public AggregateAutoMapper() + { + this._autoMappers = new List(); + } + + /// + /// Adds the auto mapper. + /// + /// The auto mapper. + public void AddAutoMapper(IAutoMapper autoMapper) + { + if (autoMapper == null) + throw new ArgumentNullException("autoMapper"); + + this._autoMappers.Add(autoMapper); + } + + /// + /// Creates the class map. + /// + /// Type of the entity. + /// The class map finder. 
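// ---- Editor's illustrative sketch (not part of this change set) ----
// Refers to the id generators above: a generator is normally wired in through the id overrides
// builder rather than invoked directly. `mapping` is a MappingStoreBuilder, and `Account` with its
// Guid-typed `Key` property is hypothetical.
mapping.Map<Account>(o => o.Id("Key")
                           .GeneratedBy<GuidCombGenerator>()   // sequential-friendly Guids
                           .UnsavedValue(Guid.Empty));
// OidGenerator and AssignedIdGenerator are the other built-in choices; AssignedIdGenerator never
// creates a value, it only throws IdGenerationException if the caller left the id unassigned.
// ---- end sketch ----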
+ /// + public IClassMap CreateClassMap(Type classType, Func classMapFinder) + { + foreach (var autoMapper in _autoMappers) + { + var classMap = autoMapper.CreateClassMap(classType, classMapFinder); + if (classMap != null) + return classMap; + } + + throw new Exception(string.Format("Unable to create map for {0}.", classType)); + } + } +} diff --git a/source/MongoDB/Configuration/Mapping/Auto/AutoMapper.cs b/source/MongoDB/Configuration/Mapping/Auto/AutoMapper.cs new file mode 100644 index 00000000..a3a302a8 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/AutoMapper.cs @@ -0,0 +1,213 @@ +using System; +using System.Reflection; +using MongoDB.Configuration.Mapping.Model; +using MongoDB.Configuration.Mapping.Util; +using MongoDB.Util; + +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// + public class AutoMapper : IAutoMapper + { + private readonly Func _filter; + private readonly IAutoMappingProfile _profile; + + /// + /// Initializes a new instance of the class. + /// + public AutoMapper() + : this(null, null){ + } + + /// + /// Initializes a new instance of the class. + /// + /// The profile. + public AutoMapper(IAutoMappingProfile profile) + : this(profile, null){ + } + + /// + /// Initializes a new instance of the class. + /// + /// The filter. + public AutoMapper(Func filter) + : this(null, filter){ + } + + /// + /// Initializes a new instance of the class. + /// + /// The profile. + /// The filter. + public AutoMapper(IAutoMappingProfile profile, Func filter){ + _filter = filter ?? new Func(t => true); + _profile = profile ?? new AutoMappingProfile(); + } + + /// + /// Creates the class map. + /// + /// Type of the entity. + /// The class map finder. + /// + public IClassMap CreateClassMap(Type classType, Func classMapFinder){ + if(classType == null) + throw new ArgumentNullException("classType"); + if(classMapFinder == null) + throw new ArgumentNullException("classMapFinder"); + + if(classType.IsInterface) + throw new NotSupportedException("Only classes can be mapped currently."); + + if(!_filter(classType)) + return null; + + if(_profile.IsSubClass(classType)) + return CreateSubClassMap(classType, classMapFinder); + + return CreateClassMap(classType); + } + + /// + /// Creates the class map. + /// + /// Type of the entity. + /// + private ClassMap CreateClassMap(Type classType){ + var classMap = new ClassMap(classType) + { + CollectionName = _profile.GetCollectionName(classType), + DiscriminatorAlias = _profile.GetDiscriminatorAlias(classType) + }; + //if(!classType.IsInterface && !classType.IsAbstract) + // classMap.Discriminator = _profile.GetDiscriminator(classType); + + classMap.IdMap = CreateIdMap(classType); + classMap.ExtendedPropertiesMap = CreateExtendedPropertiesMap(classType); + + foreach(var member in _profile.FindMembers(classType)) + { + if(classMap.HasId && classMap.IdMap.MemberName == member.Name) + continue; + if(classMap.HasExtendedProperties && classMap.ExtendedPropertiesMap.MemberName == member.Name) + continue; + + classMap.AddMemberMap(CreateMemberMap(classType, member)); + } + + return classMap; + } + + private SubClassMap CreateSubClassMap(Type classType, Func classMapFinder){ + //TODO: should probably do something different to find the base type + //mabe a convention? 
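// (Editor's note, descriptive only) CreateSubClassMap resolves the parent's map through
// classMapFinder, registers this map as a subclass of it, and chains discriminators: the subclass
// discriminator is stored either on its own or inside an object[] together with the parent's.
// Members the parent already maps are skipped so they are not mapped twice.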
+ var superClassMap = classMapFinder(classType.BaseType); + if(superClassMap == null) + throw new InvalidOperationException(string.Format("Unable to find super class map for subclass {0}", classType)); + + var subClassMap = new SubClassMap(classType); + ((ClassMapBase)superClassMap).AddSubClassMap(subClassMap); + var discriminator = _profile.GetDiscriminator(classType); + var parentDiscriminator = superClassMap.Discriminator; + if(parentDiscriminator == null) + subClassMap.Discriminator = discriminator; + else + { + Array array; + if(parentDiscriminator.GetType().IsArray) + array = Array.CreateInstance(typeof(object), ((Array)parentDiscriminator).Length + 1); + else + { + array = Array.CreateInstance(typeof(object), 2); + array.SetValue(parentDiscriminator, 0); + } + + array.SetValue(discriminator, array.Length - 1); + subClassMap.Discriminator = array; + } + + foreach(var member in _profile.FindMembers(classType)) + { + if(subClassMap.HasId && subClassMap.IdMap.MemberName == member.Name) + continue; + + if(subClassMap.HasExtendedProperties && subClassMap.ExtendedPropertiesMap.MemberName == member.Name) + continue; + + if(superClassMap.GetMemberMapFromMemberName(member.Name) != null) + continue; //don't want to remap a member + + subClassMap.AddMemberMap(CreateMemberMap(classType, member)); + } + + return subClassMap; + } + + private ExtendedPropertiesMap CreateExtendedPropertiesMap(Type classType){ + var extPropMember = _profile.FindExtendedPropertiesMember(classType); + if(extPropMember == null) + return null; + + return new ExtendedPropertiesMap( + extPropMember.Name, + extPropMember.GetReturnType(), + MemberReflectionOptimizer.GetGetter(extPropMember), + MemberReflectionOptimizer.GetSetter(extPropMember)); + } + + private IdMap CreateIdMap(Type classType){ + var idMember = _profile.FindIdMember(classType); + if(idMember == null) + return null; + + var memberReturnType = idMember.GetReturnType(); + + return new IdMap( + idMember.Name, + memberReturnType, + MemberReflectionOptimizer.GetGetter(idMember), + MemberReflectionOptimizer.GetSetter(idMember), + _profile.GetIdGenerator(classType, idMember), + _profile.GetIdUnsavedValue(classType, idMember)); + } + + private PersistentMemberMap CreateMemberMap(Type classType, MemberInfo member){ + var memberReturnType = member.GetReturnType(); + + var dictionaryAdapter = _profile.GetDictionaryAdapter(classType, member, memberReturnType); + if (dictionaryAdapter != null) + return new DictionaryMemberMap( + member.Name, + MemberReflectionOptimizer.GetGetter(member), + MemberReflectionOptimizer.GetSetter(member), + _profile.GetAlias(classType, member), + _profile.GetPersistDefaultValue(classType, member), + dictionaryAdapter); + + var collectionType = _profile.GetCollectionAdapter(classType, member, memberReturnType); + if(collectionType != null) + return new CollectionMemberMap( + member.Name, + memberReturnType, + MemberReflectionOptimizer.GetGetter(member), + MemberReflectionOptimizer.GetSetter(member), + _profile.GetAlias(classType, member), + _profile.GetPersistDefaultValue(classType, member), + collectionType, + _profile.GetCollectionElementType(classType, member, memberReturnType)); + + //TODO: reference checking... 
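// (Editor's note, descriptive only) Neither a dictionary adapter nor a collection adapter applied
// to this member, so it falls through to a plain PersistentMemberMap below.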
+ + return new PersistentMemberMap( + member.Name, + memberReturnType, + MemberReflectionOptimizer.GetGetter(member), + MemberReflectionOptimizer.GetSetter(member), + _profile.GetDefaultValue(classType, member), + _profile.GetAlias(classType, member), + _profile.GetPersistDefaultValue(classType, member)); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Auto/AutoMappingProfile.cs b/source/MongoDB/Configuration/Mapping/Auto/AutoMappingProfile.cs new file mode 100644 index 00000000..23079632 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/AutoMappingProfile.cs @@ -0,0 +1,271 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using MongoDB.Attributes; +using MongoDB.Configuration.CollectionAdapters; +using MongoDB.Configuration.IdGenerators; +using MongoDB.Configuration.Mapping.Conventions; +using MongoDB.Util; +using MongoDB.Configuration.DictionaryAdapters; + +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// + /// + public class AutoMappingProfile : IAutoMappingProfile + { + private ConventionProfile _conventions; + private Func _isSubClass; + private IMemberFinder _memberFinder; + + /// + /// Gets or sets the conventions. + /// + /// The conventions. + public ConventionProfile Conventions + { + get { return _conventions; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _conventions = value; + } + } + + /// + /// Gets or sets the member finder. + /// + /// The member finder. + public IMemberFinder MemberFinder + { + get { return _memberFinder; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _memberFinder = value; + } + } + + /// + /// Gets or sets the is sub class. + /// + /// The is sub class. + public Func IsSubClassDelegate + { + get { return _isSubClass; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _isSubClass = value; + } + } + + /// + /// Initializes a new instance of the class. + /// + public AutoMappingProfile() + { + _conventions = new ConventionProfile(); + _isSubClass = t => false; + _memberFinder = DefaultMemberFinder.Instance; + } + + /// + /// Finds the extended properties member. + /// + /// Type of the class. + /// + public MemberInfo FindExtendedPropertiesMember(Type classType) + { + return _conventions.ExtendedPropertiesConvention.GetExtendedPropertiesMember(classType); + } + + /// + /// Gets the id member for the class type. + /// + /// Type of the entity. + /// + public MemberInfo FindIdMember(Type classType) + { + var members = (from memberInfo in _memberFinder.FindMembers(classType) + let att = memberInfo.GetCustomAttribute(true) + where att != null + select memberInfo).ToList(); + + if (members.Count > 1) + throw new InvalidOperationException("Cannot have more than 1 member marked with a MongoId Attribute."); + if(members.Count == 0) + return _conventions.IdConvention.GetIdMember(classType); + return members[0]; + } + + /// + /// Finds the members to map for the class type. + /// + /// Type of the entity. + /// + public IEnumerable FindMembers(Type classType) + { + return from memberInfo in _memberFinder.FindMembers(classType) + let doMap = memberInfo.GetCustomAttribute(true) == null + where doMap + select memberInfo; + } + + /// + /// Gets the property name for the member. + /// + /// Type of the entity. + /// The member. 
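// ---- Editor's illustrative sketch (not part of this change set) ----
// AutoMappingProfile can also be configured directly instead of through the fluent builder.
// The namespace used in the AutoMapper filter below is hypothetical.
var profile = new AutoMappingProfile
{
    IsSubClassDelegate = t => t.BaseType != null && t.BaseType != typeof(object),
    MemberFinder = DefaultMemberFinder.Instance
};
var autoMapper = new AutoMapper(profile, t => t.Namespace == "MyApp.Documents");
// ---- end sketch ----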
+ /// + public virtual string GetAlias(Type classType, MemberInfo member) + { + string alias = null; + var att = member.GetCustomAttribute(true); + if (att != null) + alias = att.Name; + if (string.IsNullOrEmpty(alias)) + alias = _conventions.AliasConvention.GetAlias(member) ?? member.Name; + + return alias; + } + + /// + /// Gets the collection name for the class type. + /// + /// Type of the entity. + /// + public virtual string GetCollectionName(Type classType) + { + return _conventions.CollectionNameConvention.GetCollectionName(classType) ?? classType.Name; + } + + /// + /// Gets the type of the collection. + /// + /// Type of the class. + /// The member. + /// Type of the member return. + /// + public ICollectionAdapter GetCollectionAdapter(Type classType, MemberInfo member, Type memberReturnType) + { + return _conventions.CollectionAdapterConvention.GetCollectionAdapter(memberReturnType); + } + + /// + /// Gets the type of the collection element. + /// + /// Type of the class. + /// The member. + /// Type of the member return. + /// + public Type GetCollectionElementType(Type classType, MemberInfo member, Type memberReturnType) + { + return _conventions.CollectionAdapterConvention.GetElementType(memberReturnType); + } + + /// + /// Gets the default value. + /// + /// Type of the class. + /// The member. + /// + public virtual object GetDefaultValue(Type classType, MemberInfo member) + { + object defaultValue = null; + var att = member.GetCustomAttribute(true); + if (att != null) + defaultValue = att.Value; + return defaultValue ?? (_conventions.DefaultValueConvention.GetDefaultValue(member.GetReturnType())); + } + + /// + /// Gets the dictionary adadpter. + /// + /// Type of the class. + /// The member. + /// Type of the member return. + /// + public IDictionaryAdapter GetDictionaryAdapter(Type classType, MemberInfo member, Type memberReturnType) + { + return _conventions.DictionaryAdapterConvention.GetDictionaryAdapter(memberReturnType); + } + + /// + /// Gets the descriminator for the class type. + /// + /// Type of the entity. + /// + public object GetDiscriminator(Type classType) + { + return _conventions.DiscriminatorConvention.GetDiscriminator(classType); + } + + /// + /// Gets the property name of the discriminator for the class type. + /// + /// Type of the entity. + /// + public string GetDiscriminatorAlias(Type classType) + { + return _conventions.DiscriminatorAliasConvention.GetDiscriminatorAlias(classType); + } + + /// + /// Gets the id generator for the member. + /// + /// + /// The member. + /// + public IIdGenerator GetIdGenerator(Type classType, MemberInfo member) + { + return _conventions.IdGeneratorConvention.GetGenerator(member.GetReturnType()); + } + + /// + /// Gets the unsaved value for the id. + /// + /// Type of the entity. + /// The member. + /// + public object GetIdUnsavedValue(Type classType, MemberInfo member) + { + return _conventions.IdUnsavedValueConvention.GetUnsavedValue(member.GetReturnType()); + } + + /// + /// Gets a value indicating whether the member should be persisted if it is null. + /// + /// Type of the class. + /// The member. + /// + public bool GetPersistDefaultValue(Type classType, MemberInfo member) + { + var att = member.GetCustomAttribute(true); + if (att != null) + return att.PersistDefaultValue; + + return true; + } + + /// + /// Indicates whether the class type is a sub class. + /// + /// Type of the class. + /// + /// true if the classType is a sub class; otherwise, false. 
+ /// + public bool IsSubClass(Type classType) + { + return _isSubClass(classType); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Auto/ClassOverrides.cs b/source/MongoDB/Configuration/Mapping/Auto/ClassOverrides.cs new file mode 100644 index 00000000..b04f4897 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/ClassOverrides.cs @@ -0,0 +1,47 @@ +using System.Collections.Generic; +using System.Reflection; + +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// + /// + public class ClassOverrides + { + private readonly Dictionary _memberOverrides; + + /// + /// Gets or sets the name of the collection. + /// + /// The name of the collection. + public string CollectionName { get; set; } + + /// + /// Gets or sets the id overrides. + /// + /// The id overrides. + public IdOverrides IdOverrides { get; set; } + + /// + /// Initializes a new instance of the class. + /// + public ClassOverrides() + { + _memberOverrides = new Dictionary(); + } + + /// + /// Gets the overrides for. + /// + /// The member info. + /// + public MemberOverrides GetOverridesFor(MemberInfo memberInfo) + { + MemberOverrides memberOverrides; + if (!_memberOverrides.TryGetValue(memberInfo, out memberOverrides)) + memberOverrides = _memberOverrides[memberInfo] = new MemberOverrides(); + + return memberOverrides; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Auto/ClassOverridesMap.cs b/source/MongoDB/Configuration/Mapping/Auto/ClassOverridesMap.cs new file mode 100644 index 00000000..5d977476 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/ClassOverridesMap.cs @@ -0,0 +1,50 @@ +using System; +using System.Collections.Generic; + +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// + /// + public class ClassOverridesMap + { + private readonly Dictionary _overrides; + + /// + /// Initializes a new instance of the class. + /// + public ClassOverridesMap() + { + _overrides = new Dictionary(); + } + + /// + /// Gets the class overrides for the type. + /// + /// The type. + /// + public ClassOverrides GetOverridesForType(Type type) + { + if (type == null) + throw new ArgumentNullException("type"); + + ClassOverrides classOverrides; + if (!this._overrides.TryGetValue(type, out classOverrides)) + classOverrides = this._overrides[type] = new ClassOverrides(); + + return classOverrides; + } + + /// + /// Determines whether [has overrides for type] [the specified type]. + /// + /// The type. + /// + /// true if [has overrides for type] [the specified type]; otherwise, false. + /// + public bool HasOverridesForType(Type type) + { + return _overrides.ContainsKey(type); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Auto/DefaultMemberFinder.cs b/source/MongoDB/Configuration/Mapping/Auto/DefaultMemberFinder.cs new file mode 100644 index 00000000..bd59e419 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/DefaultMemberFinder.cs @@ -0,0 +1,51 @@ +using System; +using System.Collections.Generic; +using System.Reflection; + +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// + /// + public class DefaultMemberFinder : IMemberFinder + { + /// + /// + public static readonly DefaultMemberFinder Instance = new DefaultMemberFinder(); + + /// + /// Initializes a new instance of the class. + /// + private DefaultMemberFinder() + { + } + + /// + /// Finds the members. + /// + /// The type. 
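// ---- Editor's illustrative sketch (not part of this change set) ----
// IMemberFinder has a single method, so an alternative finder is easy to supply; this one maps
// public instance fields only. It could be plugged in via AutoMappingProfile.MemberFinder or the
// profile builder's FindMembersWith(...).
public class PublicFieldMemberFinder : IMemberFinder
{
    public IEnumerable<MemberInfo> FindMembers(Type type)
    {
        foreach (var field in type.GetFields(BindingFlags.Instance | BindingFlags.Public))
            yield return field;
    }
}
// ---- end sketch ----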
+ /// + public IEnumerable FindMembers(Type type) + { + foreach (var prop in type.GetProperties(BindingFlags.Instance|BindingFlags.NonPublic|BindingFlags.Public)) + { + var getMethod = prop.GetGetMethod(true); + var setMethod = prop.GetSetMethod(true); + + if(getMethod==null || getMethod.IsPrivate || setMethod==null) + continue; + + if (setMethod.GetParameters().Length != 1) //an indexer + continue; + + yield return prop; + } + + foreach (var field in type.GetFields()) //all public fields + { + if (!field.IsInitOnly && !field.IsLiteral) //readonly + yield return field; + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Auto/IAutoMapper.cs b/source/MongoDB/Configuration/Mapping/Auto/IAutoMapper.cs new file mode 100644 index 00000000..8538866c --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/IAutoMapper.cs @@ -0,0 +1,20 @@ +using System; + +using MongoDB.Configuration.Mapping.Model; + +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// + /// + public interface IAutoMapper + { + /// + /// Creates the class map. + /// + /// Type of the entity. + /// The class map finder. + /// + IClassMap CreateClassMap(Type classType, Func classMapFinder); + } +} diff --git a/source/MongoDB/Configuration/Mapping/Auto/IAutoMappingProfile.cs b/source/MongoDB/Configuration/Mapping/Auto/IAutoMappingProfile.cs new file mode 100644 index 00000000..e0cd4362 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/IAutoMappingProfile.cs @@ -0,0 +1,134 @@ +using System; +using System.Collections.Generic; +using System.Reflection; + +using MongoDB.Configuration.CollectionAdapters; +using MongoDB.Configuration.IdGenerators; +using MongoDB.Configuration.DictionaryAdapters; + +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// + /// + public interface IAutoMappingProfile + { + /// + /// Finds the extended properties member. + /// + /// Type of the class. + /// + MemberInfo FindExtendedPropertiesMember(Type classType); + + /// + /// Gets the id member for the class type. + /// + /// Type of the entity. + /// + MemberInfo FindIdMember(Type classType); + + /// + /// Finds the members to map for the class type. + /// + /// Type of the entity. + /// + IEnumerable FindMembers(Type classType); + + /// + /// Gets the alias for the specified member. + /// + /// Type of the class. + /// The member. + /// + string GetAlias(Type classType, MemberInfo member); + + /// + /// Gets the collection name for the class type. + /// + /// Type of the entity. + /// + string GetCollectionName(Type classType); + + /// + /// Gets the collection adapter. + /// + /// Type of the class. + /// The member. + /// Type of the member return. + /// + ICollectionAdapter GetCollectionAdapter(Type classType, MemberInfo member, Type memberReturnType); + + /// + /// Gets the type of the collection element. + /// + /// Type of the class. + /// The member. + /// Type of the member return. + /// + Type GetCollectionElementType(Type classType, MemberInfo member, Type memberReturnType); + + /// + /// Gets the default value. + /// + /// Type of the class. + /// The member. + /// + object GetDefaultValue(Type classType, MemberInfo member); + + /// + /// Gets the dictionary adadpter. + /// + /// Type of the class. + /// The member. + /// Type of the member return. + /// + IDictionaryAdapter GetDictionaryAdapter(Type classType, MemberInfo member, Type memberReturnType); + + /// + /// Gets the descriminator for the class type. + /// + /// Type of the entity. 
+ /// + object GetDiscriminator(Type classType); + + /// + /// Gets the discriminator alias. + /// + /// Type of the class. + /// + string GetDiscriminatorAlias(Type classType); + + /// + /// Gets the id generator for the member. + /// + /// Type of the class. + /// The member. + /// + IIdGenerator GetIdGenerator(Type classType, MemberInfo member); + + /// + /// Gets the unsaved value for the id. + /// + /// Type of the entity. + /// The member. + /// + object GetIdUnsavedValue(Type classType, MemberInfo member); + + /// + /// Gets a value indicating whether the member should be persisted if it is it's default value. + /// + /// Type of the class. + /// The member. + /// + bool GetPersistDefaultValue(Type classType, MemberInfo member); + + /// + /// Indicates whether the class type is a sub class. + /// + /// Type of the class. + /// + /// true if the classType is a sub class; otherwise, false. + /// + bool IsSubClass(Type classType); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Auto/IMemberFinder.cs b/source/MongoDB/Configuration/Mapping/Auto/IMemberFinder.cs new file mode 100644 index 00000000..30997669 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/IMemberFinder.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections.Generic; +using System.Reflection; + +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// + /// + public interface IMemberFinder + { + /// + /// Finds the members. + /// + /// The type. + /// + IEnumerable FindMembers(Type type); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Auto/IdOverrides.cs b/source/MongoDB/Configuration/Mapping/Auto/IdOverrides.cs new file mode 100644 index 00000000..b4fb0823 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/IdOverrides.cs @@ -0,0 +1,33 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Reflection; +using MongoDB.Configuration.IdGenerators; + +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// Overrides the Id member. + /// + public class IdOverrides + { + /// + /// Gets or sets the member. + /// + /// The member. + public MemberInfo Member { get; set; } + + /// + /// Gets or sets the generator. + /// + /// The generator. + public IIdGenerator Generator { get; set; } + + /// + /// Gets or sets the unsaved value. + /// + /// The unsaved value. + public object UnsavedValue { get; set; } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Auto/MemberOverrides.cs b/source/MongoDB/Configuration/Mapping/Auto/MemberOverrides.cs new file mode 100644 index 00000000..171fe8e5 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/MemberOverrides.cs @@ -0,0 +1,31 @@ +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// + public class MemberOverrides + { + /// + /// Gets or sets the alias to use for the member. + /// + /// The alias. + public string Alias { get; set; } + + /// + /// Gets or sets the default value. + /// + /// The default value. + public object DefaultValue { get; set; } + + /// + /// Gets or sets a value whether the member should be ignored from the map. + /// + /// true if exclude; otherwise, false. + public bool? Ignore { get; set; } + + /// + /// Gets or sets a value indicating whether a member with the default value gets persisted. + /// + /// The persist default value. + public bool? 
PersistDefaultValue { get; set; } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Auto/OverridableAutoMappingProfile.cs b/source/MongoDB/Configuration/Mapping/Auto/OverridableAutoMappingProfile.cs new file mode 100644 index 00000000..ba9938fc --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Auto/OverridableAutoMappingProfile.cs @@ -0,0 +1,279 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using MongoDB.Configuration.CollectionAdapters; +using MongoDB.Configuration.DictionaryAdapters; +using MongoDB.Configuration.IdGenerators; + +namespace MongoDB.Configuration.Mapping.Auto +{ + /// + /// + public class OverridableAutoMappingProfile : IAutoMappingProfile + { + private readonly ClassOverridesMap _overrides; + private readonly IAutoMappingProfile _profile; + + /// + /// Initializes a new instance of the class. + /// + /// The profile. + /// The overrides. + public OverridableAutoMappingProfile(IAutoMappingProfile profile, ClassOverridesMap overrides) + { + if(overrides == null) + throw new ArgumentNullException("overrides"); + if(profile == null) + throw new ArgumentNullException("profile"); + + _overrides = overrides; + _profile = profile; + } + + /// + /// Finds the extended properties member. + /// + /// Type of the class. + /// + public MemberInfo FindExtendedPropertiesMember(Type classType) + { + return _profile.FindExtendedPropertiesMember(classType); + } + + /// + /// Gets the id member for the class type. + /// + /// Type of the entity. + /// + public MemberInfo FindIdMember(Type classType) + { + return GetIdOverrideValue( + classType, + o => o.Member, + m => m != null, + _profile.FindIdMember(classType)); + } + + /// + /// Finds the members to map for the class type. + /// + /// Type of the entity. + /// + public IEnumerable FindMembers(Type classType) + { + return _profile.FindMembers(classType) + .Where(member => (bool)GetMemberOverrideValue(classType, member, o => !o.Ignore, v => v.HasValue, true)); + } + + /// + /// Gets the property name for the member. + /// + /// Type of the entity. + /// The member. + /// + public string GetAlias(Type classType, MemberInfo member) + { + var alias = _profile.GetAlias(classType, member); + + return GetMemberOverrideValue(classType, + member, + o => o.Alias, + s => !string.IsNullOrEmpty(s), + alias); + } + + /// + /// Gets the collection name for the class type. + /// + /// Type of the entity. + /// + public string GetCollectionName(Type classType) + { + return GetClassOverrideValue(classType, + o => o.CollectionName, + s => !string.IsNullOrEmpty(s), + _profile.GetCollectionName(classType)); + } + + /// + /// Gets the type of the collection. + /// + /// Type of the class. + /// The member. + /// Type of the member return. + /// + public ICollectionAdapter GetCollectionAdapter(Type classType, MemberInfo member, Type memberReturnType) + { + return _profile.GetCollectionAdapter(classType, member, memberReturnType); + } + + /// + /// Gets the type of the collection element. + /// + /// Type of the class. + /// The member. + /// Type of the member return. + /// + public Type GetCollectionElementType(Type classType, MemberInfo member, Type memberReturnType) + { + return _profile.GetCollectionElementType(classType, member, memberReturnType); + } + + /// + /// Gets the default value. + /// + /// Type of the class. + /// The member. 
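// Illustrative sketch (hypothetical helper, not part of this changeset): each Get* method of the
// overridable profile reduces to the same decision - use the configured override when it passes
// an "is set" test, otherwise fall back to the wrapped profile's answer.
using System;

internal static class OverrideFallbackExample
{
    // Returns the override when 'isSet' accepts it; otherwise returns the fallback value.
    public static T OverrideOrDefault<T>(T overrideValue, Func<T, bool> isSet, T fallback)
    {
        return isSet(overrideValue) ? overrideValue : fallback;
    }
}
// e.g. OverrideFallbackExample.OverrideOrDefault(overrides.Alias, s => !string.IsNullOrEmpty(s), profileAlias);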
+ /// + public object GetDefaultValue(Type classType, MemberInfo member) + { + var defaultValue = _profile.GetDefaultValue(classType, member); + + return GetMemberOverrideValue(classType, + member, + o => o.DefaultValue, + v => v != null, + defaultValue); + } + + /// + /// Gets the dictionary adadpter. + /// + /// Type of the class. + /// The member. + /// Type of the member return. + /// + public IDictionaryAdapter GetDictionaryAdapter(Type classType, MemberInfo member, Type memberReturnType) + { + return _profile.GetDictionaryAdapter(classType, member, memberReturnType); + } + + /// + /// Gets the descriminator for the class type. + /// + /// Type of the entity. + /// + public object GetDiscriminator(Type classType) + { + return _profile.GetDiscriminator(classType); + } + + /// + /// Gets the property name of the discriminator for the class type. + /// + /// Type of the entity. + /// + public string GetDiscriminatorAlias(Type classType) + { + return _profile.GetDiscriminatorAlias(classType); + } + + /// + /// Gets the id generator for the member. + /// + /// + /// The member. + /// + public IIdGenerator GetIdGenerator(Type classType, MemberInfo member) + { + return GetIdOverrideValue( + classType, + o => o.Generator, + g => g != null, + _profile.GetIdGenerator(classType, member)); + } + + /// + /// Gets the unsaved value for the id. + /// + /// Type of the entity. + /// The member. + /// + public object GetIdUnsavedValue(Type classType, MemberInfo member) + { + return GetIdOverrideValue( + classType, + o => o.UnsavedValue, + v => v != null, + _profile.GetIdUnsavedValue(classType, member)); + } + + /// + /// Gets a value indicating whether the member should be persisted if it is null. + /// + /// Type of the class. + /// The member. + /// + public bool GetPersistDefaultValue(Type classType, MemberInfo member) + { + return (bool)GetMemberOverrideValue(classType, + member, + o => o.PersistDefaultValue, + v => v.HasValue, + _profile.GetPersistDefaultValue(classType, member)); + } + + /// + /// Indicates whether the class type is a sub class. + /// + /// Type of the class. + /// + /// true if the classType is a sub class; otherwise, false. + /// + public bool IsSubClass(Type classType) + { + return _profile.IsSubClass(classType); + } + + /// + /// Gets the class override value. + /// + /// + /// Type of the class. + /// The overrides. + /// The accept. + /// The default value. + /// + private T GetClassOverrideValue(Type classType, Func overrides, Func accept, T defaultValue) + { + if(!_overrides.HasOverridesForType(classType)) + return defaultValue; + + var value = overrides(_overrides.GetOverridesForType(classType)); + + return !accept(value) ? defaultValue : value; + } + + private T GetIdOverrideValue(Type classType, + Func overrides, + Func accept, + T defaultValue) + { + if (!_overrides.HasOverridesForType(classType)) + return defaultValue; + + var idOverrides = _overrides.GetOverridesForType(classType).IdOverrides; + if (idOverrides == null) + return defaultValue; + + var value = overrides(idOverrides); + + return !accept(value) ? defaultValue : value; + } + + private T GetMemberOverrideValue(Type classType, + MemberInfo member, + Func overrides, + Func accept, + T defaultValue) + { + if(!_overrides.HasOverridesForType(classType)) + return defaultValue; + + var value = overrides(_overrides.GetOverridesForType(classType).GetOverridesFor(member)); + + return !accept(value) ? 
defaultValue : value; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/AutoMappingStore.cs b/source/MongoDB/Configuration/Mapping/AutoMappingStore.cs new file mode 100644 index 00000000..20b8e7fc --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/AutoMappingStore.cs @@ -0,0 +1,114 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using MongoDB.Configuration.Mapping.Auto; +using MongoDB.Configuration.Mapping.Model; + +namespace MongoDB.Configuration.Mapping +{ + /// + /// + public class AutoMappingStore : IMappingStore + { + private readonly IAutoMapper _autoMapper; + private readonly Dictionary _autoMaps; + private readonly ReaderWriterLockSlim _lock = new ReaderWriterLockSlim(LockRecursionPolicy.SupportsRecursion); + private readonly IMappingStore _wrappedMappingStore; + + /// + /// Initializes a new instance of the class. + /// + public AutoMappingStore() + : this(new AutoMapper()) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The profile. + public AutoMappingStore(IAutoMappingProfile profile) + : this(new AutoMapper(profile), null) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The auto mapper. + public AutoMappingStore(IAutoMapper autoMapper) + : this(autoMapper, null) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The profile. + /// The mapping store. + public AutoMappingStore(IAutoMappingProfile profile, IMappingStore mappingStore) + : this(new AutoMapper(profile), mappingStore) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The auto mapper. + /// The mapping store. + public AutoMappingStore(IAutoMapper autoMapper, IMappingStore mappingStore) + { + if(autoMapper == null) + throw new ArgumentNullException("autoMapper"); + + _autoMapper = autoMapper; + _autoMaps = new Dictionary(); + _wrappedMappingStore = mappingStore; + } + + /// + /// Gets the class map for the specified class type. + /// + /// Type of the entity. 
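// Illustrative sketch (hypothetical wiring, not part of this changeset): an AutoMappingStore can
// stand alone or wrap another IMappingStore. As GetClassMap below shows, types the wrapped store
// already maps are returned as-is, and anything it does not know about is auto-mapped on demand
// and cached.
using MongoDB.Configuration.Mapping;
using MongoDB.Configuration.Mapping.Auto;

internal static class AutoMappingStoreExample
{
    public static IMappingStore Build(IAutoMappingProfile profile, IMappingStore explicitMaps)
    {
        // 'explicitMaps' is consulted for known types; unmapped types fall through to the
        // auto mapper driven by 'profile'.
        return new AutoMappingStore(profile, explicitMaps);
    }
}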
+ /// + public IClassMap GetClassMap(Type classType) + { + try + { + _lock.EnterUpgradeableReadLock(); + + IClassMap classMap; + if(_autoMaps.TryGetValue(classType, out classMap)) + return classMap; + + if(_wrappedMappingStore != null) + { + classMap = _wrappedMappingStore.GetClassMap(classType); + if(classMap != null) + return classMap; + } + + classMap = _autoMapper.CreateClassMap(classType, GetClassMap); + + try + { + _lock.EnterWriteLock(); + + _autoMaps.Add(classType, classMap); + + return classMap; + } + finally + { + if(_lock.IsWriteLockHeld) + _lock.ExitWriteLock(); + } + } + finally + { + if(_lock.IsUpgradeableReadLockHeld) + _lock.ExitUpgradeableReadLock(); + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/ConventionProfile.cs b/source/MongoDB/Configuration/Mapping/Conventions/ConventionProfile.cs new file mode 100644 index 00000000..dfc40dbb --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/ConventionProfile.cs @@ -0,0 +1,217 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public class ConventionProfile + { + private IAliasConvention _aliasConvention; + private ICollectionAdapterConvention _collectionAdapterConvention; + private ICollectionNameConvention _collectionNameConvention; + private IDefaultValueConvention _defaultValueConvention; + private IDictionarynAdapterConvention _dictionaryAdapterConvention; + private IDiscriminatorConvention _discriminatorConvention; + private IDiscriminatorAliasConvention _discriminatorAliasConvention; + private IExtendedPropertiesConvention _extendedPropertiesConvention; + private IIdConvention _idConvention; + private IIdGeneratorConvention _idGeneratorConvention; + private IIdUnsavedValueConvention _idUnsavedValueConvention; + + /// + /// Gets or sets the alias convention. + /// + /// The alias convention. + public IAliasConvention AliasConvention + { + get { return _aliasConvention; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _aliasConvention = value; + } + } + + /// + /// Gets or sets the collection adapter convention. + /// + /// The collection adapter convention. + public ICollectionAdapterConvention CollectionAdapterConvention + { + get { return _collectionAdapterConvention; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _collectionAdapterConvention = value; + } + } + + /// + /// Gets or sets the collection name convention. + /// + /// The collection name convention. + public ICollectionNameConvention CollectionNameConvention + { + get { return _collectionNameConvention; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _collectionNameConvention = value; + } + } + + /// + /// Gets or sets the default value convention. + /// + /// The default value convention. + public IDefaultValueConvention DefaultValueConvention + { + get { return _defaultValueConvention; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _defaultValueConvention = value; + } + } + + /// + /// Gets or sets the dictionary adapter convention. + /// + /// The dictionary adapter convention. + public IDictionarynAdapterConvention DictionaryAdapterConvention + { + get { return _dictionaryAdapterConvention; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _dictionaryAdapterConvention = value; + } + } + + /// + /// Gets or sets the discriminator convention. 
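// Illustrative sketch (hypothetical, simplified from the pattern used in AutoMappingStore.GetClassMap
// above): a ReaderWriterLockSlim-protected cache that takes an upgradeable read lock for the lookup
// and only escalates to a write lock when the value has to be created and stored.
using System;
using System.Collections.Generic;
using System.Threading;

internal class LockedCache<TKey, TValue>
{
    private readonly Dictionary<TKey, TValue> _cache = new Dictionary<TKey, TValue>();
    private readonly ReaderWriterLockSlim _lock = new ReaderWriterLockSlim();

    public TValue GetOrCreate(TKey key, Func<TKey, TValue> factory)
    {
        _lock.EnterUpgradeableReadLock();
        try
        {
            TValue value;
            if (_cache.TryGetValue(key, out value))
                return value;

            value = factory(key);

            _lock.EnterWriteLock();
            try
            {
                // the upgradeable lock is exclusive among writers-in-waiting, so no duplicate insert
                _cache[key] = value;
                return value;
            }
            finally
            {
                _lock.ExitWriteLock();
            }
        }
        finally
        {
            _lock.ExitUpgradeableReadLock();
        }
    }
}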
+ /// + /// The discriminator convention. + public IDiscriminatorConvention DiscriminatorConvention + { + get { return _discriminatorConvention; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _discriminatorConvention = value; + } + } + + /// + /// Gets or sets the discriminator alias convention. + /// + /// The discriminator alias convention. + public IDiscriminatorAliasConvention DiscriminatorAliasConvention + { + get { return _discriminatorAliasConvention; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _discriminatorAliasConvention = value; + } + } + + /// + /// Gets or sets the extended properties convention. + /// + /// The extended properties convention. + public IExtendedPropertiesConvention ExtendedPropertiesConvention + { + get { return this._extendedPropertiesConvention; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _extendedPropertiesConvention = value; + } + } + + /// + /// Gets or sets the id convention. + /// + /// The id convention. + public IIdConvention IdConvention + { + get { return _idConvention; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _idConvention = value; + } + } + + /// + /// Gets or sets the id generator convention. + /// + /// The id generator convention. + public IIdGeneratorConvention IdGeneratorConvention + { + get { return _idGeneratorConvention; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _idGeneratorConvention = value; + } + } + + /// + /// Gets or sets the id unsaved value convention. + /// + /// The id unsaved value convention. + public IIdUnsavedValueConvention IdUnsavedValueConvention + { + get { return _idUnsavedValueConvention; } + set + { + if (value == null) + throw new ArgumentNullException("value"); + + _idUnsavedValueConvention = value; + } + } + + /// + /// Initializes a new instance of the class. 
+ /// + public ConventionProfile() + { + _aliasConvention = new DelegateAliasConvention(m => m.Name); + _collectionNameConvention = new DelegateCollectionNameConvention(t => t.Name); + _collectionAdapterConvention = DefaultCollectionAdapterConvention.Instance; + _defaultValueConvention = DefaultDefaultValueConvention.Instance; + _dictionaryAdapterConvention = DefaultDictionaryAdapterConvention.Instance; + _discriminatorConvention = new DelegateDiscriminatorConvention(t => t.Name); + _discriminatorAliasConvention = new DelegateDiscriminatorAliasConvention(t => "_t"); + _extendedPropertiesConvention = new DelegateExtendedPropertiesConvention(m => m.Name == "ExtendedProperties"); + _idConvention = new DelegateIdConvention(m => m.Name == "Id"); + _idGeneratorConvention = DefaultIdGeneratorConvention.Instance; + _idUnsavedValueConvention = DefaultIdUnsavedValueConvention.Instance; + } + } + +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/DefaultCollectionAdapterConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/DefaultCollectionAdapterConvention.cs new file mode 100644 index 00000000..13c77b1a --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/DefaultCollectionAdapterConvention.cs @@ -0,0 +1,137 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using MongoDB.Configuration.CollectionAdapters; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + public class DefaultCollectionAdapterConvention : ICollectionAdapterConvention + { + /// + /// + private static readonly Dictionary CollectionTypes = new Dictionary + { + {typeof(ArrayList), CreateArrayListCollectionType}, + {typeof(IList), CreateArrayListCollectionType}, + {typeof(ICollection), CreateArrayListCollectionType}, + {typeof(IEnumerable), CreateArrayListCollectionType}, + {typeof(HashSet<>), CreateGenericSetCollectionType}, + {typeof(List<>), CreateGenericListCollectionType}, + {typeof(IList<>), CreateGenericListCollectionType}, + {typeof(ICollection<>), CreateGenericListCollectionType}, + {typeof(IEnumerable<>), CreateGenericListCollectionType} + }; + + /// + /// + private static readonly Dictionary ElementTypes = new Dictionary + { + {typeof(ArrayList), GetArrayListElementType}, + {typeof(IList), GetArrayListElementType}, + {typeof(ICollection), GetArrayListElementType}, + {typeof(IEnumerable), GetArrayListElementType}, + {typeof(HashSet<>), GetGenericSetElementType}, + {typeof(List<>), GetGenericListElementType}, + {typeof(IList<>), GetGenericListElementType}, + {typeof(ICollection<>), GetGenericListElementType}, + {typeof(IEnumerable<>), GetGenericListElementType} + }; + + /// + /// + public static readonly DefaultCollectionAdapterConvention Instance = new DefaultCollectionAdapterConvention(); + + /// + /// Initializes a new instance of the class. + /// + private DefaultCollectionAdapterConvention() + { + } + + /// + /// Gets the type of the collection. + /// + /// The type. + /// + public ICollectionAdapter GetCollectionAdapter(Type type) + { + CollectionTypeFactoryDelegate factory; + if(CollectionTypes.TryGetValue(type, out factory)) + return factory(); + + if(type.IsArray && type != typeof(byte[])) + return new ArrayCollectionAdapter(); + + if(type.IsGenericType && !type.IsGenericTypeDefinition) + { + var genericType = type.GetGenericTypeDefinition(); + if(CollectionTypes.TryGetValue(genericType, out factory)) + return factory(); + } + + return null; + } + + /// + /// Gets the type of the element. 
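// Illustrative sketch (hypothetical configuration, not part of this changeset): the ConventionProfile
// defaults above (member name as alias, type name as collection name, "_t" as discriminator alias,
// "Id" as the id member, "ExtendedProperties" as the catch-all) can be replaced per convention.
// For example, camelCase aliases and lower-cased collection names:
using MongoDB.Configuration.Mapping.Conventions;

internal static class ConventionProfileExample
{
    public static ConventionProfile Build()
    {
        return new ConventionProfile
        {
            AliasConvention = new DelegateAliasConvention(m => char.ToLowerInvariant(m.Name[0]) + m.Name.Substring(1)),
            CollectionNameConvention = new DelegateCollectionNameConvention(t => t.Name.ToLowerInvariant())
        };
    }
}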
+ /// + /// The type. + /// + public Type GetElementType(Type type) + { + ElementTypeFactoryDelegate factory; + if(ElementTypes.TryGetValue(type, out factory)) + return factory(type); + + if(type.IsArray) + return type.GetElementType(); + + if(type.IsGenericType && !type.IsGenericTypeDefinition) + { + var genericType = type.GetGenericTypeDefinition(); + if(ElementTypes.TryGetValue(genericType, out factory)) + return factory(type); + } + + return null; + } + + private static ArrayListCollectionAdapter CreateArrayListCollectionType() + { + return new ArrayListCollectionAdapter(); + } + + private static Type GetArrayListElementType(Type type) + { + return typeof(object); + } + + private static GenericListCollectionAdapter CreateGenericListCollectionType() + { + return new GenericListCollectionAdapter(); + } + + private static Type GetGenericListElementType(Type type) + { + return type.GetGenericArguments()[0]; + } + + private static GenericSetCollectionAdapter CreateGenericSetCollectionType() + { + return new GenericSetCollectionAdapter(); + } + + private static Type GetGenericSetElementType(Type type) + { + return type.GetGenericArguments()[0]; + } + + /// + /// + private delegate ICollectionAdapter CollectionTypeFactoryDelegate(); + + private delegate Type ElementTypeFactoryDelegate(Type type); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/DefaultDefaultValueConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/DefaultDefaultValueConvention.cs new file mode 100644 index 00000000..356b98cc --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/DefaultDefaultValueConvention.cs @@ -0,0 +1,31 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public class DefaultDefaultValueConvention : IDefaultValueConvention + { + /// + /// + /// + public static readonly DefaultDefaultValueConvention Instance = new DefaultDefaultValueConvention(); + + /// + /// Initializes a new instance of the class. + /// + private DefaultDefaultValueConvention() + { + } + + /// + /// Gets the default value. + /// + /// The type. + /// + public object GetDefaultValue(Type type){ + return type.IsValueType ? Activator.CreateInstance(type) : null; + } + } +} diff --git a/source/MongoDB/Configuration/Mapping/Conventions/DefaultDictionaryAdapterConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/DefaultDictionaryAdapterConvention.cs new file mode 100644 index 00000000..e4b82038 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/DefaultDictionaryAdapterConvention.cs @@ -0,0 +1,72 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using MongoDB.Configuration.DictionaryAdapters; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + public class DefaultDictionaryAdapterConvention : IDictionarynAdapterConvention + { + /// + /// + private static readonly Dictionary DictionaryTypes = new Dictionary + { + {typeof(IDictionary), CreateHashtableType}, + {typeof(Hashtable), CreateHashtableType}, + {typeof(IEnumerable), CreateHashtableType} + }; + + /// + /// + public static readonly DefaultDictionaryAdapterConvention Instance = new DefaultDictionaryAdapterConvention(); + + private DefaultDictionaryAdapterConvention() + { + } + + /// + /// Gets the type of the dictionary. + /// + /// The type. 
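// Illustrative sketch (hypothetical expectations, not part of this changeset): DefaultCollectionAdapterConvention
// above resolves adapters and element types by exact type first, then by the array rule, then by the
// generic type definition, so for example:
using System;
using System.Collections;
using System.Collections.Generic;
using MongoDB.Configuration.Mapping.Conventions;

internal static class CollectionConventionExample
{
    public static void Show()
    {
        var convention = DefaultCollectionAdapterConvention.Instance;

        Console.WriteLine(convention.GetElementType(typeof(List<int>)));    // System.Int32 (via the List<> entry)
        Console.WriteLine(convention.GetElementType(typeof(string[])));     // System.String (array element type)
        Console.WriteLine(convention.GetElementType(typeof(ArrayList)));    // System.Object
        Console.WriteLine(convention.GetCollectionAdapter(typeof(byte[]))); // null - byte[] is explicitly excluded from the array rule
    }
}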
+ /// + public IDictionaryAdapter GetDictionaryAdapter(Type type) + { + DictionaryTypeFactoryDelegate factory; + if(DictionaryTypes.TryGetValue(type, out factory)) + return factory(); + + if(type.IsGenericType && !type.IsGenericTypeDefinition) + { + var genericType = type.GetGenericTypeDefinition(); + + if(genericType == typeof(SortedList<,>)) + { + var genericArgs = type.GetGenericArguments(); + var adapterType = typeof(GenericSortedListDictionaryAdapter<,>).MakeGenericType(genericArgs[0], genericArgs[1]); + return (IDictionaryAdapter)Activator.CreateInstance(adapterType); + } + + if(genericType == typeof(IDictionary<,>) || + genericType == typeof(Dictionary<,>)) + { + var genericArgs = type.GetGenericArguments(); + var adapterType = typeof(GenericDictionaryDictionaryAdapter<,>).MakeGenericType(genericArgs[0], genericArgs[1]); + return (IDictionaryAdapter)Activator.CreateInstance(adapterType); + } + } + + return null; + } + + private static IDictionaryAdapter CreateHashtableType() + { + return new HashtableDictionaryAdapter(); + } + + /// + /// + private delegate IDictionaryAdapter DictionaryTypeFactoryDelegate(); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/DefaultIdGeneratorConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/DefaultIdGeneratorConvention.cs new file mode 100644 index 00000000..7fba107a --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/DefaultIdGeneratorConvention.cs @@ -0,0 +1,37 @@ +using System; +using MongoDB.Configuration.IdGenerators; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public class DefaultIdGeneratorConvention : IIdGeneratorConvention + { + /// + /// + public static readonly DefaultIdGeneratorConvention Instance = new DefaultIdGeneratorConvention(); + + /// + /// Initializes a new instance of the class. + /// + private DefaultIdGeneratorConvention() + { } + + /// + /// Gets the generator. + /// + /// The type. + /// + public IIdGenerator GetGenerator(Type type) + { + if (type == typeof(Oid)) + return new OidGenerator(); + + if (type == typeof(Guid)) + return new GuidCombGenerator(); + + return new AssignedIdGenerator(); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/DefaultIdUnsavedValueConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/DefaultIdUnsavedValueConvention.cs new file mode 100644 index 00000000..fb3b78e3 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/DefaultIdUnsavedValueConvention.cs @@ -0,0 +1,31 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public class DefaultIdUnsavedValueConvention : IIdUnsavedValueConvention + { + /// + /// + /// + public static readonly DefaultIdUnsavedValueConvention Instance = new DefaultIdUnsavedValueConvention(); + + /// + /// Initializes a new instance of the class. + /// + private DefaultIdUnsavedValueConvention() + { } + + /// + /// Gets the unsaved value. + /// + /// The type. + /// + public object GetUnsavedValue(Type type) + { + return type.IsValueType ? 
Activator.CreateInstance(type) : null; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/DelegateAliasConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/DelegateAliasConvention.cs new file mode 100644 index 00000000..06570140 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/DelegateAliasConvention.cs @@ -0,0 +1,32 @@ +using System; +using System.Reflection; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public class DelegateAliasConvention : IAliasConvention + { + readonly Func _alias; + + /// + /// Initializes a new instance of the class. + /// + /// The alias. + public DelegateAliasConvention(Func alias) + { + _alias = alias; + } + + /// + /// Gets the alias. + /// + /// The member info. + /// + public string GetAlias(MemberInfo memberInfo) + { + return _alias(memberInfo); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/DelegateCollectionNameConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/DelegateCollectionNameConvention.cs new file mode 100644 index 00000000..62f0ecc5 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/DelegateCollectionNameConvention.cs @@ -0,0 +1,31 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public class DelegateCollectionNameConvention : ICollectionNameConvention + { + private readonly Func _collectionName; + + /// + /// Initializes a new instance of the class. + /// + /// Name of the collection. + public DelegateCollectionNameConvention(Func collectionName) + { + _collectionName = collectionName; + } + + /// + /// Gets the name of the collection. + /// + /// Type of the entity. + /// + public string GetCollectionName(Type classType) + { + return _collectionName(classType); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/DelegateDiscriminatorAliasConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/DelegateDiscriminatorAliasConvention.cs new file mode 100644 index 00000000..d7a3df4b --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/DelegateDiscriminatorAliasConvention.cs @@ -0,0 +1,31 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public class DelegateDiscriminatorAliasConvention : IDiscriminatorAliasConvention + { + private readonly Func _discriminatorAlias; + + /// + /// Initializes a new instance of the class. + /// + /// The discriminator alias. + public DelegateDiscriminatorAliasConvention(Func discriminatorAlias) + { + _discriminatorAlias = discriminatorAlias; + } + + /// + /// Gets the discriminator alias. + /// + /// Type of the class. 
+ /// + public string GetDiscriminatorAlias(Type classType) + { + return _discriminatorAlias(classType); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/DelegateDiscriminatorConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/DelegateDiscriminatorConvention.cs new file mode 100644 index 00000000..b1458120 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/DelegateDiscriminatorConvention.cs @@ -0,0 +1,31 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public class DelegateDiscriminatorConvention : IDiscriminatorConvention + { + private readonly Func _discriminator; + + /// + /// Initializes a new instance of the class. + /// + /// The discriminator. + public DelegateDiscriminatorConvention(Func discriminator) + { + _discriminator = discriminator; + } + + /// + /// Gets the discriminator. + /// + /// Type of the entity. + /// + public object GetDiscriminator(Type classType) + { + return _discriminator(classType); + } + } +} diff --git a/source/MongoDB/Configuration/Mapping/Conventions/DelegateExtendedPropertiesConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/DelegateExtendedPropertiesConvention.cs new file mode 100644 index 00000000..e38857a8 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/DelegateExtendedPropertiesConvention.cs @@ -0,0 +1,39 @@ +using System; +using System.Reflection; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public class DelegateExtendedPropertiesConvention : MemberFinderBase, IExtendedPropertiesConvention + { + /// + /// Initializes a new instance of the class. + /// + /// The predicate. + public DelegateExtendedPropertiesConvention(Func predicate) + : base(predicate) + { } + + /// + /// Initializes a new instance of the class. + /// + /// The predicate. + /// The member types. + /// The binding flags. + public DelegateExtendedPropertiesConvention(Func predicate, MemberTypes memberTypes, BindingFlags bindingFlags) + : base(predicate, memberTypes, bindingFlags) + { } + + /// + /// Gets the member representing extended properties if one exists. + /// + /// Type of the entity. + /// + public MemberInfo GetExtendedPropertiesMember(Type classType) + { + return GetMember(classType); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/DelegateIdConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/DelegateIdConvention.cs new file mode 100644 index 00000000..d18d1637 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/DelegateIdConvention.cs @@ -0,0 +1,41 @@ +using System; +using System.Reflection; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public class DelegateIdConvention : MemberFinderBase, IIdConvention + { + /// + /// Initializes a new instance of the class. + /// + /// The predicate. + public DelegateIdConvention(Func predicate) + : base(predicate) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The predicate. + /// The member types. + /// The binding flags. + public DelegateIdConvention(Func predicate, MemberTypes memberTypes, BindingFlags bindingFlags) + : base(predicate, memberTypes, bindingFlags) + { + } + + /// + /// Gets the member representing the id if one exists. + /// + /// Type of the entity. 
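// Illustrative sketch (hypothetical document class, not part of this changeset; the MongoDB namespace
// for Oid and Document is assumed): with the default delegate conventions, a member named "Id" becomes
// the _id member and a member named "ExtendedProperties" collects unmapped fields, so an entity only
// needs the right member names to participate in auto mapping:
using MongoDB;

public class Person
{
    public Oid Id { get; set; }                       // picked up by DelegateIdConvention (m.Name == "Id")
    public string Name { get; set; }
    public Document ExtendedProperties { get; set; }  // picked up by DelegateExtendedPropertiesConvention
}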
+ /// + public MemberInfo GetIdMember(Type classType) + { + return GetMember(classType); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/IAliasConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/IAliasConvention.cs new file mode 100644 index 00000000..43c4e060 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/IAliasConvention.cs @@ -0,0 +1,17 @@ +using System.Reflection; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public interface IAliasConvention + { + /// + /// Gets the alias. + /// + /// The member. + /// + string GetAlias(MemberInfo member); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/ICollectionAdapterConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/ICollectionAdapterConvention.cs new file mode 100644 index 00000000..e114bc37 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/ICollectionAdapterConvention.cs @@ -0,0 +1,26 @@ +using System; + +using MongoDB.Configuration.CollectionAdapters; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public interface ICollectionAdapterConvention + { + /// + /// Gets the collection adapter. + /// + /// The type. + /// + ICollectionAdapter GetCollectionAdapter(Type type); + + /// + /// Gets the type of the element. + /// + /// The type. + /// + Type GetElementType(Type type); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/ICollectionNameConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/ICollectionNameConvention.cs new file mode 100644 index 00000000..4c4152fa --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/ICollectionNameConvention.cs @@ -0,0 +1,17 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public interface ICollectionNameConvention + { + /// + /// Gets the name of the collection. + /// + /// Type of the entity. + /// + string GetCollectionName(Type classType); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/IDefaultValueConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/IDefaultValueConvention.cs new file mode 100644 index 00000000..4eb7d247 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/IDefaultValueConvention.cs @@ -0,0 +1,17 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public interface IDefaultValueConvention + { + /// + /// Gets the default value. + /// + /// The type. + /// + object GetDefaultValue(Type type); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/IDictionaryAdapterConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/IDictionaryAdapterConvention.cs new file mode 100644 index 00000000..8cc54c3d --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/IDictionaryAdapterConvention.cs @@ -0,0 +1,19 @@ +using System; + +using MongoDB.Configuration.DictionaryAdapters; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public interface IDictionarynAdapterConvention + { + /// + /// Gets the dictionary adapter. + /// + /// The type. 
+ /// + IDictionaryAdapter GetDictionaryAdapter(Type type); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/IDiscriminatorAliasConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/IDiscriminatorAliasConvention.cs new file mode 100644 index 00000000..480c6214 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/IDiscriminatorAliasConvention.cs @@ -0,0 +1,17 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public interface IDiscriminatorAliasConvention + { + /// + /// Gets the name of the discriminator property if one exists. + /// + /// The type. + /// + string GetDiscriminatorAlias(Type type); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/IDiscriminatorConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/IDiscriminatorConvention.cs new file mode 100644 index 00000000..5393897f --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/IDiscriminatorConvention.cs @@ -0,0 +1,17 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public interface IDiscriminatorConvention + { + /// + /// Gets the discriminator if one exists. + /// + /// The type. + /// + object GetDiscriminator(Type type); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/IExtendedPropertiesConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/IExtendedPropertiesConvention.cs new file mode 100644 index 00000000..4299471a --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/IExtendedPropertiesConvention.cs @@ -0,0 +1,18 @@ +using System; +using System.Reflection; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public interface IExtendedPropertiesConvention + { + /// + /// Gets the member representing extended properties if one exists. + /// + /// Type of the entity. + /// + MemberInfo GetExtendedPropertiesMember(Type classType); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/IIdConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/IIdConvention.cs new file mode 100644 index 00000000..2dfd57c3 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/IIdConvention.cs @@ -0,0 +1,18 @@ +using System; +using System.Reflection; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public interface IIdConvention + { + /// + /// Gets the member representing the id if one exists. + /// + /// Type of the entity. + /// + MemberInfo GetIdMember(Type classType); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/IIdGeneratorConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/IIdGeneratorConvention.cs new file mode 100644 index 00000000..2235bfe1 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/IIdGeneratorConvention.cs @@ -0,0 +1,19 @@ +using System; + +using MongoDB.Configuration.IdGenerators; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public interface IIdGeneratorConvention + { + /// + /// Gets the generator. + /// + /// The type. 
+ /// + IIdGenerator GetGenerator(Type type); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/IIdUnsavedValueConvention.cs b/source/MongoDB/Configuration/Mapping/Conventions/IIdUnsavedValueConvention.cs new file mode 100644 index 00000000..bd8eb675 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/IIdUnsavedValueConvention.cs @@ -0,0 +1,17 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public interface IIdUnsavedValueConvention + { + /// + /// Gets the unsaved value. + /// + /// The type. + /// + object GetUnsavedValue(Type type); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Conventions/MemberFinderBase.cs b/source/MongoDB/Configuration/Mapping/Conventions/MemberFinderBase.cs new file mode 100644 index 00000000..19d63258 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Conventions/MemberFinderBase.cs @@ -0,0 +1,67 @@ +using System; +using System.Reflection; + +namespace MongoDB.Configuration.Mapping.Conventions +{ + /// + /// + /// + public abstract class MemberFinderBase + { + private readonly BindingFlags _bindingFlags; + private readonly MemberTypes _memberTypes; + private readonly Func _predicate; + + /// + /// Initializes a new instance of the class. + /// + /// The predicate. + protected MemberFinderBase(Func predicate) + : this(predicate, MemberTypes.Property, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic) + { } + + /// + /// Initializes a new instance of the class. + /// + /// The predicate. + /// The member types. + /// The binding flags. + protected MemberFinderBase(Func predicate, MemberTypes memberTypes, BindingFlags bindingFlags) + { + _bindingFlags = bindingFlags; + _memberTypes = memberTypes; + _predicate = predicate; + } + + /// + /// Gets the member representing the id if one exists. + /// + /// The type. + /// + protected MemberInfo GetMember(Type type) + { + var foundMembers = type.FindMembers(_memberTypes, _bindingFlags, IsMatch, null); + + if (foundMembers.Length == 0) + return null; + if (foundMembers.Length == 1) + return foundMembers[0]; + + //Todo: use custom exception + throw new Exception("Too many members found matching the criteria."); + } + + /// + /// Determines whether the specified member info is match. + /// + /// The member info. + /// The criteria. + /// + /// true if the specified member info is match; otherwise, false. + /// + private bool IsMatch(MemberInfo memberInfo, object criteria) + { + return _predicate(memberInfo); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/IMappingStore.cs b/source/MongoDB/Configuration/Mapping/IMappingStore.cs new file mode 100644 index 00000000..0e504f86 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/IMappingStore.cs @@ -0,0 +1,19 @@ +using System; + +using MongoDB.Configuration.Mapping.Model; + +namespace MongoDB.Configuration.Mapping +{ + /// + /// + /// + public interface IMappingStore + { + /// + /// Gets the class map. + /// + /// Type of the class. 
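// Illustrative sketch (hypothetical, not part of this changeset): MemberFinderBase above defers to
// Type.FindMembers with a MemberFilter built from the supplied predicate; roughly equivalent to:
using System;
using System.Reflection;

internal static class FindMembersExample
{
    public static MemberInfo FindSingle(Type type, Func<MemberInfo, bool> predicate)
    {
        MemberInfo[] found = type.FindMembers(
            MemberTypes.Property,
            BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic,
            (member, criteria) => predicate(member),
            null);

        if (found.Length > 1)
            throw new Exception("Too many members found matching the criteria.");

        return found.Length == 0 ? null : found[0];
    }
}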
+ /// + IClassMap GetClassMap(Type classType); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Model/ClassMap.cs b/source/MongoDB/Configuration/Mapping/Model/ClassMap.cs new file mode 100644 index 00000000..3093dfab --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Model/ClassMap.cs @@ -0,0 +1,51 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Model +{ + /// + /// + public class ClassMap : ClassMapBase + { + /// + /// Initializes a new instance of the class. + /// + /// Type of the entity. + public ClassMap(Type classType) + : base(classType){ + } + + /// + /// Gets the name of the collection. + /// + /// The name of the collection. + public override string CollectionName { get; internal set; } + + /// + /// Gets the alias used to store the discriminator. + /// + /// The discriminator alias. + public override string DiscriminatorAlias { get; internal set; } + + /// + /// Gets the extended properties map. + /// + /// The extended properties map. + public override ExtendedPropertiesMap ExtendedPropertiesMap { get; internal set; } + + /// + /// Gets the id map. + /// + /// The id map. + public override IdMap IdMap { get; internal set; } + + /// + /// Gets a value indicating whether this class map is a subclass. + /// + /// + /// true if this class map is a subclass; otherwise, false. + /// + public override bool IsSubClass{ + get { return false; } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Model/ClassMapBase.cs b/source/MongoDB/Configuration/Mapping/Model/ClassMapBase.cs new file mode 100644 index 00000000..6b226b80 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Model/ClassMapBase.cs @@ -0,0 +1,284 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; + +namespace MongoDB.Configuration.Mapping.Model +{ + /// + /// + public abstract class ClassMapBase : IClassMap + { + private readonly List _memberMaps; + private readonly List _subClassMaps; + private readonly bool _hasProtectedOrPublicConstructor; + + /// + /// Initializes a new instance of the class. + /// + /// Type of the entity. + protected ClassMapBase(Type classType) + { + if(classType == null) + throw new ArgumentNullException("classType"); + + ClassType = classType; + _memberMaps = new List(); + _subClassMaps = new List(); + _hasProtectedOrPublicConstructor = ClassType.GetConstructors(BindingFlags.Instance | + BindingFlags.Public | + BindingFlags.NonPublic) + .Any(c => !c.IsPrivate); + } + + /// + /// Gets the type of class to which this map pertains. + /// + /// The type of the class. + public Type ClassType { get; private set; } + + /// + /// Gets the name of the collection. + /// + /// The name of the collection. + public abstract string CollectionName { get; internal set; } + + /// + /// Gets the discriminator. + /// + /// The discriminator. + public object Discriminator { get; internal set; } + + /// + /// Gets the alias used to store the discriminator. + /// + /// The discriminator alias. + public abstract string DiscriminatorAlias { get; internal set; } + + /// + /// Gets the extended properties map. + /// + /// The extended properties map. + public abstract ExtendedPropertiesMap ExtendedPropertiesMap { get; internal set; } + + /// + /// Gets a value indicating whether this instance has discriminator. + /// + /// + /// true if this instance has discriminator; otherwise, false. 
+ /// + public bool HasDiscriminator + { + get { return Discriminator != null; } + } + + /// + /// Gets a value indicating whether the class map has extended properties. + /// + /// + /// true if the class map has extended properties; otherwise, false. + /// + public bool HasExtendedProperties + { + get { return ExtendedPropertiesMap != null; } + } + + /// + /// Gets a value indicating whether the class map has an id. + /// + /// true if the class map has an id; otherwise, false. + public virtual bool HasId + { + get { return IdMap != null; } + } + + /// + /// Gets the id map. + /// + /// The id map. + public abstract IdMap IdMap { get; internal set; } + + /// + /// Gets a value indicating whether this class map is polymorphic. + /// + /// + /// true if this class map is polymorphic; otherwise, false. + /// + public virtual bool IsPolymorphic + { + get { return _subClassMaps.Count > 0; } + } + + /// + /// Gets a value indicating whether this class map is a subclass. + /// + /// + /// true if this class map is a subclass; otherwise, false. + /// + public abstract bool IsSubClass { get; } + + /// + /// Gets the member maps. + /// + /// The member maps. + public virtual IEnumerable MemberMaps + { + get { return _memberMaps.AsReadOnly(); } + } + + /// + /// Gets the sub class maps. + /// + /// The sub class maps. + public IEnumerable SubClassMaps + { + get { return _subClassMaps.AsReadOnly(); } + } + + /// + /// Creates an instance of the entity. + /// + /// + public object CreateInstance() + { + if(!_hasProtectedOrPublicConstructor) + throw new MissingMethodException("No public or protected constructor found on type " + ClassType.FullName); + + if (ClassType.IsAbstract) + throw new MongoException("Unable to create an instance of an abstract class."); + + //TODO: figure out how to support custom activators... + var instance = Activator.CreateInstance(ClassType, true); + + //initialize all default values in case something isn't specified when reader the document. + foreach(var memberMap in MemberMaps.Where(x => x.DefaultValue != null)) + memberMap.SetValue(instance, memberMap.DefaultValue); + + return instance; + } + + /// + /// Gets the class map from discriminator. + /// + /// The discriminator. + /// + public virtual IClassMap GetClassMapFromDiscriminator(object discriminator) + { + return GetClassMapFromDiscriminator(this, discriminator); + } + + /// + /// Gets the id of the specified entitiy. + /// + /// The entity. + /// + public object GetId(object entity) + { + if(!HasId) + throw new InvalidCastException(string.Format("{0} does not have a mapped id.", ClassType)); + + return IdMap.GetValue(entity); + } + + /// + /// Gets the member map from alias. + /// + /// Name of the property. + /// + public PersistentMemberMap GetMemberMapFromAlias(string propertyName) + { + if(HasId && IdMap.Alias == propertyName) + return IdMap; + + return MemberMaps.FirstOrDefault(memberMap => memberMap.Alias == propertyName); + } + + /// + /// Gets the member map that corresponds to the specified member name. + /// + /// Name of the member. + /// + public PersistentMemberMap GetMemberMapFromMemberName(string memberName) + { + if(HasId && IdMap.MemberName == memberName) + return IdMap; + + return MemberMaps.FirstOrDefault(memberMap => memberMap.MemberName == memberName); + } + + /// + /// Adds the member map. + /// + /// The member map. + internal void AddMemberMap(PersistentMemberMap memberMap) + { + _memberMaps.Add(memberMap); + } + + /// + /// Adds the member maps. + /// + /// The member maps. 
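// Illustrative sketch (hypothetical, not part of this changeset): CreateInstance above calls
// Activator.CreateInstance(ClassType, true), so a protected or internal constructor is enough for
// materialization, while a type with only private constructors (or an abstract type) is rejected
// before activation is attempted.
using System;

public class AuditedEntity
{
    protected AuditedEntity() { }    // acceptable: non-private constructor, hidden from callers
    public DateTime Created { get; set; }
}

internal static class CreateInstanceExample
{
    public static object Materialize(Type classType)
    {
        // mirrors ClassMapBase.CreateInstance without the default-value pass
        return Activator.CreateInstance(classType, true);
    }
}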
+ internal void AddMemberMaps(IEnumerable memberMaps) + { + if (memberMaps.Any(m => m.Alias == "_id")) + throw new MongoException("_id is a reserved MongoDB alias and cannot be used for anything other than an Id column."); + + _memberMaps.AddRange(memberMaps); + } + + /// + /// Adds the sub class map. + /// + /// The sub class map. + internal void AddSubClassMap(SubClassMap subClassMap) + { + _subClassMaps.Add(subClassMap); + subClassMap.SuperClassMap = this; + } + + /// + /// Adds the sub class maps. + /// + /// The sub class maps. + internal void AddSubClassMaps(IEnumerable subClassMaps) + { + foreach(var subClassMap in subClassMaps) + AddSubClassMap(subClassMap); + } + + private static IClassMap GetClassMapFromDiscriminator(IClassMap classMap, object discriminator) + { + if(AreObjectsEqual(classMap.Discriminator, discriminator)) + return classMap; + + return + classMap.SubClassMaps.Select(subClassMap => GetClassMapFromDiscriminator(subClassMap, discriminator)).FirstOrDefault( + subSubClassMap => subSubClassMap != null); + } + + private static bool AreObjectsEqual(object a, object b) + { + if(a == null && b == null) + return true; + if(a == null || b == null) + return false; + + if(a is IEnumerable && b is IEnumerable) + { + var aEnum = ((IEnumerable)a).GetEnumerator(); + var bEnum = ((IEnumerable)b).GetEnumerator(); + while(aEnum.MoveNext() && bEnum.MoveNext()) + { + var v = AreObjectsEqual(aEnum.Current, bEnum.Current); + if(!v) + return false; + } + return true; + } + + return a.Equals(b); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Model/CollectionMemberMap.cs b/source/MongoDB/Configuration/Mapping/Model/CollectionMemberMap.cs new file mode 100644 index 00000000..fd7ef324 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Model/CollectionMemberMap.cs @@ -0,0 +1,65 @@ +using System; +using System.Collections; + +using MongoDB.Configuration.CollectionAdapters; + +namespace MongoDB.Configuration.Mapping.Model +{ + /// + /// + /// + public class CollectionMemberMap : PersistentMemberMap + { + private readonly ICollectionAdapter _collectionAdapter; + + /// + /// Gets the type of the element. + /// + /// The type of the element. + public Type ElementType { get; private set; } + + /// + /// Initializes a new instance of the class. + /// + /// Name of the member. + /// Type of the member return. + /// The getter. + /// The setter. + /// The alias. + /// if set to true [persist default value]. + /// Type of the collection. + /// Type of the element. + public CollectionMemberMap(string memberName, Type memberReturnType, Func getter, Action setter, string alias, bool persistDefaultValue, ICollectionAdapter collectionAdapter, Type elementType) + : base(memberName, memberReturnType, getter, setter, null, alias, persistDefaultValue) + { + _collectionAdapter = collectionAdapter; + ElementType = elementType; + } + + /// + /// Gets the value. + /// + /// The instance. + /// + public override object GetValue(object instance) + { + var elements = _collectionAdapter.GetElementsFromCollection(base.GetValue(instance)); + var list = new ArrayList(); + + foreach (var element in elements) + list.Add(element); + + return list.ToArray(); + } + + /// + /// Sets the value on the specified instance. + /// + /// The instance. + /// The value. 
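// Illustrative sketch (hypothetical values, not part of this changeset): AreObjectsEqual above
// compares enumerable discriminators element by element, presumably so that array-valued
// discriminators (e.g. a type-hierarchy path) match by content rather than by reference.
using System;

internal static class DiscriminatorEqualityExample
{
    public static void Show()
    {
        // Two separately-allocated arrays with the same contents: plain Equals reports false,
        // but the element-wise walk in AreObjectsEqual treats them as the same discriminator.
        object a = new object[] { "Animal", "Cat" };
        object b = new object[] { "Animal", "Cat" };
        Console.WriteLine(object.Equals(a, b)); // False
    }
}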
+ public override void SetValue(object instance, object value) + { + base.SetValue(instance, _collectionAdapter.CreateCollection(ElementType, (object[])value)); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Model/DictionaryMemberMap.cs b/source/MongoDB/Configuration/Mapping/Model/DictionaryMemberMap.cs new file mode 100644 index 00000000..5d7b4b61 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Model/DictionaryMemberMap.cs @@ -0,0 +1,68 @@ +using System; +using MongoDB.Configuration.DictionaryAdapters; + + +namespace MongoDB.Configuration.Mapping.Model +{ + /// + /// + /// + public class DictionaryMemberMap : PersistentMemberMap + { + private readonly IDictionaryAdapter _dictionaryAdapter; + + /// + /// Gets the type of the key. + /// + /// The type of the key. + public Type KeyType + { + get { return _dictionaryAdapter.KeyType; } + } + + /// + /// Gets the type of the value. + /// + /// The type of the value. + public Type ValueType + { + get { return _dictionaryAdapter.ValueType; } + } + + /// + /// Initializes a new instance of the class. + /// + /// Name of the member. + /// The getter. + /// The setter. + /// The alias. + /// if set to true [persist default value]. + /// The dictionary adapter. + public DictionaryMemberMap(string memberName, Func getter, Action setter, string alias, bool persistDefaultValue, IDictionaryAdapter dictionaryAdapter) + : base(memberName, typeof(Document), getter, setter, null, alias, persistDefaultValue) + { + _dictionaryAdapter = dictionaryAdapter; + } + + /// + /// Gets the value. + /// + /// The instance. + /// + public override object GetValue(object instance) + { + var value = base.GetValue(instance); + return _dictionaryAdapter.GetDocument(value); + } + + /// + /// Sets the value on the specified instance. + /// + /// The instance. + /// The value. + public override void SetValue(object instance, object value) + { + base.SetValue(instance, _dictionaryAdapter.CreateDictionary((Document)value)); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Model/ExtendedPropertiesMap.cs b/source/MongoDB/Configuration/Mapping/Model/ExtendedPropertiesMap.cs new file mode 100644 index 00000000..6ebd55e9 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Model/ExtendedPropertiesMap.cs @@ -0,0 +1,22 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Model +{ + /// + /// + /// + public class ExtendedPropertiesMap : MemberMapBase + { + /// + /// Initializes a new instance of the class. + /// + /// Name of the member. + /// Type of the member return. + /// The getter. + /// The setter. + public ExtendedPropertiesMap(string memberName, Type memberReturnType, Func getter, Action setter) + : base(memberName, memberReturnType, getter, setter) + { + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Model/IClassMap.cs b/source/MongoDB/Configuration/Mapping/Model/IClassMap.cs new file mode 100644 index 00000000..e32e7d7b --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Model/IClassMap.cs @@ -0,0 +1,131 @@ +using System; +using System.Collections.Generic; + +namespace MongoDB.Configuration.Mapping.Model +{ + /// + /// + /// + public interface IClassMap + { + /// + /// Gets the type of class to which this map pertains. + /// + /// The type of the class. + Type ClassType { get; } + + /// + /// Gets the name of the collection. + /// + /// The name of the collection. 
+ string CollectionName { get; } + + /// + /// Gets the discriminator. + /// + /// The discriminator. + object Discriminator { get; } + + /// + /// Gets the alias used to store the discriminator. + /// + /// The discriminator alias. + string DiscriminatorAlias { get; } + + /// + /// Gets the extended properties map. + /// + /// The extended properties map. + ExtendedPropertiesMap ExtendedPropertiesMap { get; } + + /// + /// Gets a value indicating whether this instance has discriminator. + /// + /// + /// true if this instance has discriminator; otherwise, false. + /// + bool HasDiscriminator { get; } + + /// + /// Gets a value indicating whether the class map has extended properties. + /// + /// + /// true if the class map has extended properties; otherwise, false. + /// + bool HasExtendedProperties { get; } + + /// + /// Gets a value indicating whether the class map has an id. + /// + /// true if the class map has an id; otherwise, false. + bool HasId { get; } + + /// + /// Gets the id map. + /// + /// The id map. + IdMap IdMap { get; } + + /// + /// Gets a value indicating whether this class map is polymorphic. + /// + /// + /// true if this class map is polymorphic; otherwise, false. + /// + bool IsPolymorphic { get; } + + /// + /// Gets a value indicating whether this class map is a subclass. + /// + /// + /// true if this class map is a subclass; otherwise, false. + /// + bool IsSubClass { get; } + + /// + /// Gets the member maps. + /// + /// The member maps. + IEnumerable MemberMaps { get; } + + /// + /// Gets the sub class maps. + /// + /// The sub class maps. + IEnumerable SubClassMaps { get; } + + /// + /// Creates an instance of the entity. + /// + /// + object CreateInstance(); + + /// + /// Gets the class map from the specified discriminator. + /// + /// The discriminator. + /// + IClassMap GetClassMapFromDiscriminator(object discriminator); + + /// + /// Gets the id of the specified entitiy. + /// + /// The entity. + /// + object GetId(object entity); + + /// + /// Gets the member map that corresponds to the specified alias. + /// + /// The alias. + /// + PersistentMemberMap GetMemberMapFromAlias(string alias); + + /// + /// Gets the member map that corresponds to the specified member name. + /// + /// Name of the member. + /// + PersistentMemberMap GetMemberMapFromMemberName(string memberName); + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Model/IdMap.cs b/source/MongoDB/Configuration/Mapping/Model/IdMap.cs new file mode 100644 index 00000000..ef72f776 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Model/IdMap.cs @@ -0,0 +1,46 @@ +using System; + +using MongoDB.Configuration.IdGenerators; + +namespace MongoDB.Configuration.Mapping.Model +{ + /// + /// + /// + public sealed class IdMap : PersistentMemberMap + { + private readonly IIdGenerator _generator; + + /// + /// Gets the id's unsaved value. + /// + /// The unsaved value. + public object UnsavedValue { get; private set; } + + /// + /// Initializes a new instance of the class. + /// + /// Name of the member. + /// Type of the member. + /// The getter. + /// The setter. + /// The generator. + /// The unsaved value. + public IdMap(string memberName, Type memberType, Func getter, Action setter, IIdGenerator generator, object unsavedValue) + : base(memberName, memberType, getter, setter, null, "_id", true) + { + _generator = generator; + UnsavedValue = unsavedValue; + } + + /// + /// Generates the specified entity. + /// + /// The entity. 
+ /// + public object Generate(object entity) + { + return _generator.Generate(entity, this); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Model/MemberMapBase.cs b/source/MongoDB/Configuration/Mapping/Model/MemberMapBase.cs new file mode 100644 index 00000000..3954200b --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Model/MemberMapBase.cs @@ -0,0 +1,72 @@ +using System; +using MongoDB.Configuration.Mapping.Util; + +namespace MongoDB.Configuration.Mapping.Model +{ + /// + /// + public class MemberMapBase + { + private readonly Func<object, object> _getter; + private readonly Action<object, object> _setter; + + /// + /// Initializes a new instance of the class. + /// + /// Name of the member. + /// Type of the member return. + /// The getter. + /// The setter. + protected MemberMapBase(string memberName, Type memberReturnType, Func<object, object> getter, Action<object, object> setter) + { + if(memberReturnType == null) + throw new ArgumentNullException("memberReturnType"); + + _getter = getter; + MemberName = memberName; + MemberReturnType = memberReturnType; + _setter = setter; + } + + /// + /// Gets the name of the member. + /// + /// The name of the member. + public string MemberName { get; private set; } + + /// + /// Gets the type of the member return. + /// + /// The type of the member return. + public Type MemberReturnType { get; private set; } + + /// + /// Gets the value. + /// + /// The instance. + /// + public virtual object GetValue(object instance) + { + return _getter(instance); + } + + /// + /// Sets the value on the specified instance. + /// + /// The instance. + /// The value. + public virtual void SetValue(object instance, object value) + { + try + { + value = ValueConverter.Convert(value, MemberReturnType); + } + catch(MongoException exception) + { + throw new MongoException("Can not convert value on type " + instance.GetType(), exception); + } + + _setter(instance, value); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Model/PersistentMemberMap.cs b/source/MongoDB/Configuration/Mapping/Model/PersistentMemberMap.cs new file mode 100644 index 00000000..0ed942d8 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Model/PersistentMemberMap.cs @@ -0,0 +1,46 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Model +{ + /// + /// + /// + public class PersistentMemberMap : MemberMapBase + { + /// + /// Gets the alias in which to store the value. + /// + /// The alias. + public string Alias { get; private set; } + + /// + /// Gets the default value. + /// + /// The default value. + public object DefaultValue { get; private set; } + + /// + /// Gets a value indicating whether the default value should be persisted. + /// + /// true if the default value should be persisted; otherwise, false. + public bool PersistDefaultValue { get; private set; } + + /// + /// Initializes a new instance of the class. + /// + /// Name of the member. + /// Type of the member return. + /// The getter. + /// The setter. + /// The default value. + /// The alias. + /// if set to true [persist default value]. 
+ public PersistentMemberMap(string memberName, Type memberReturnType, Func getter, Action setter, object defaultValue, string alias, bool persistDefaultValue) + : base(memberName, memberReturnType, getter, setter) + { + Alias = alias; + DefaultValue = defaultValue; + PersistDefaultValue = persistDefaultValue; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Model/SubClassMap.cs b/source/MongoDB/Configuration/Mapping/Model/SubClassMap.cs new file mode 100644 index 00000000..106e83f6 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Model/SubClassMap.cs @@ -0,0 +1,92 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace MongoDB.Configuration.Mapping.Model +{ + /// + /// + /// + public class SubClassMap : ClassMapBase + { + private IClassMap _superClassMap; + + /// + /// Gets the name of the collection. + /// + /// The name of the collection. + public override string CollectionName + { + get { return _superClassMap.CollectionName; } + internal set { throw new NotSupportedException("Cannot set the collection name on a subclass map."); } + } + + /// + /// Gets the alias used to store the discriminator. + /// + /// The discriminator alias. + public override string DiscriminatorAlias + { + get { return _superClassMap.DiscriminatorAlias; } + internal set { throw new NotSupportedException("Cannot set the discriminator property name on a subclass map."); } + } + + /// + /// Gets the extended properties map. + /// + /// The extended properties map. + public override ExtendedPropertiesMap ExtendedPropertiesMap + { + get { return _superClassMap.ExtendedPropertiesMap; } + internal set { throw new NotSupportedException("Cannot set the extended properties map on a subclass map."); } + } + + /// + /// Gets the id map. + /// + /// The id map. + public override IdMap IdMap + { + get { return _superClassMap.IdMap; } + internal set { throw new NotSupportedException("Cannot set the id map on a subclass map."); } + } + + /// + /// Gets a value indicating whether this class map is a subclass. + /// + /// + /// true if this class map is a subclass; otherwise, false. + /// + public override bool IsSubClass + { + get { return true; } + } + + /// + /// Gets the member maps. + /// + /// The member maps. + public override IEnumerable MemberMaps + { + get{return _superClassMap.MemberMaps.Concat(base.MemberMaps);} + } + + /// + /// Gets or sets the super class map. + /// + /// The super class map. + public IClassMap SuperClassMap + { + get { return _superClassMap; } + internal set { _superClassMap = value; } + } + + /// + /// Initializes a new instance of the class. + /// + /// Type of the entity. 
+ public SubClassMap(Type classType) + : base(classType) + { } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Util/MemberReflectionOptimizer.cs b/source/MongoDB/Configuration/Mapping/Util/MemberReflectionOptimizer.cs new file mode 100644 index 00000000..108f5b92 --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Util/MemberReflectionOptimizer.cs @@ -0,0 +1,241 @@ +using System; +using System.Collections.Generic; +using System.Linq.Expressions; +using System.Reflection; +using System.Reflection.Emit; +using MongoDB.Util; + +namespace MongoDB.Configuration.Mapping.Util +{ + /// + /// + public static class MemberReflectionOptimizer + { + private static readonly Dictionary> GetterCache = new Dictionary>(); + private static readonly Dictionary> SetterCache = new Dictionary>(); + private static readonly object SyncObject = new object(); + + /// + /// Gets the getter. + /// + /// The member info. + /// + public static Func GetGetter(MemberInfo memberInfo) + { + if(memberInfo == null) + throw new ArgumentNullException("memberInfo"); + if(memberInfo.MemberType != MemberTypes.Field && memberInfo.MemberType != MemberTypes.Property) + throw new ArgumentException("Only fields and properties are supported.", "memberInfo"); + + if(memberInfo.MemberType == MemberTypes.Field) + return GetFieldGetter(memberInfo as FieldInfo); + + if(memberInfo.MemberType == MemberTypes.Property) + return GetPropertyGetter(memberInfo as PropertyInfo); + + throw new InvalidOperationException("Can only create getters for fields or properties."); + } + + /// + /// Gets the field getter. + /// + /// The field info. + /// + public static Func GetFieldGetter(FieldInfo fieldInfo) + { + if(fieldInfo == null) + throw new ArgumentNullException("fieldInfo"); + + var key = CreateKey(fieldInfo); + + Func getter; + lock (SyncObject) + { + if (GetterCache.TryGetValue(key, out getter)) + return getter; + } + //We release the lock here, so the relatively time consuming compiling + //does not imply contention. The price to pay is potential multiple compilations + //of the same expression... + var instanceParameter = Expression.Parameter(typeof (object), "target"); + + var member = Expression.Field(Expression.Convert(instanceParameter, fieldInfo.DeclaringType), fieldInfo); + + var lambda = Expression.Lambda>( + Expression.Convert(member, typeof (object)), + instanceParameter); + + getter = lambda.Compile(); + + lock(SyncObject) + { + GetterCache[key] = getter; + } + + return getter; + } + + /// + /// Gets the property getter. + /// + /// The property info. + /// + public static Func GetPropertyGetter(PropertyInfo propertyInfo) + { + if(propertyInfo == null) + throw new ArgumentNullException("propertyInfo"); + + var key = CreateKey(propertyInfo); + + Func getter; + + lock (SyncObject) + { + if (GetterCache.TryGetValue(key, out getter)) + return getter; + } + + if(!propertyInfo.CanRead) + throw new InvalidOperationException("Cannot create a getter for a writeonly property."); + + var instanceParameter = Expression.Parameter(typeof(object), "target"); + + var member = Expression.Property(Expression.Convert(instanceParameter, propertyInfo.DeclaringType), propertyInfo); + + var lambda = Expression.Lambda>( + Expression.Convert(member, typeof(object)), + instanceParameter); + + getter = lambda.Compile(); + + lock (SyncObject) + { + GetterCache[key] = getter; + } + return getter; + } + + /// + /// Gets the setter. + /// + /// The member info. 
+ /// + public static Action GetSetter(MemberInfo memberInfo) + { + if(memberInfo == null) + throw new ArgumentNullException("memberInfo"); + if(memberInfo.MemberType != MemberTypes.Field && memberInfo.MemberType != MemberTypes.Property) + throw new ArgumentException("Only fields and properties are supported.", "memberInfo"); + + if(memberInfo.MemberType == MemberTypes.Field) + return GetFieldSetter(memberInfo as FieldInfo); + + if(memberInfo.MemberType == MemberTypes.Property) + return GetPropertySetter(memberInfo as PropertyInfo); + + throw new InvalidOperationException("Can only create setters for fields or properties."); + } + + /// + /// Gets the field setter. + /// + /// The field info. + /// + public static Action GetFieldSetter(FieldInfo fieldInfo) + { + if(fieldInfo == null) + throw new ArgumentNullException("fieldInfo"); + + var key = CreateKey(fieldInfo); + + Action setter; + + lock (SyncObject) + { + if (SetterCache.TryGetValue(key, out setter)) + return setter; + } + + if (fieldInfo.IsInitOnly || fieldInfo.IsLiteral) + throw new InvalidOperationException("Cannot create a setter for a readonly field."); + + var sourceType = fieldInfo.DeclaringType; + var method = new DynamicMethod("Set" + fieldInfo.Name, null, new[] {typeof (object), typeof (object)}, true); + var gen = method.GetILGenerator(); + + gen.Emit(OpCodes.Ldarg_0); + gen.Emit(OpCodes.Castclass, sourceType); + gen.Emit(OpCodes.Ldarg_1); + gen.Emit(OpCodes.Unbox_Any, fieldInfo.FieldType); + gen.Emit(OpCodes.Stfld, fieldInfo); + gen.Emit(OpCodes.Ret); + + setter = (Action) method.CreateDelegate(typeof (Action)); + + lock (SyncObject) + { + SetterCache[key] = setter; + } + + return setter; + } + + /// + /// Gets the property setter. + /// + /// The property info. + /// + public static Action GetPropertySetter(PropertyInfo propertyInfo) + { + if(propertyInfo == null) + throw new ArgumentNullException("propertyInfo"); + + var key = CreateKey(propertyInfo); + + Action setter; + + lock (SyncObject) + { + if (SetterCache.TryGetValue(key, out setter)) + return setter; + } + + if (!propertyInfo.CanWrite) + throw new InvalidOperationException("Cannot create a setter for a readonly property."); + + var instanceParameter = Expression.Parameter(typeof (object), "target"); + var valueParameter = Expression.Parameter(typeof (object), "value"); + + var lambda = Expression.Lambda>( + Expression.Call( + Expression.Convert(instanceParameter, propertyInfo.DeclaringType), + propertyInfo.GetSetMethod(true), + Expression.Convert(valueParameter, propertyInfo.PropertyType)), + instanceParameter, + valueParameter); + + setter = lambda.Compile(); + + lock (SyncObject) + { + SetterCache[key] = setter; + } + + return setter; + } + + /// + /// Creates the key. + /// + /// The member info. + /// + private static string CreateKey(MemberInfo memberInfo) + { + return string.Format("{0}_{1}_{2}_{3}", + memberInfo.DeclaringType.FullName, + memberInfo.MemberType, + memberInfo.GetReturnType(), + memberInfo.Name); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/Mapping/Util/ValueConverter.cs b/source/MongoDB/Configuration/Mapping/Util/ValueConverter.cs new file mode 100644 index 00000000..7b5207df --- /dev/null +++ b/source/MongoDB/Configuration/Mapping/Util/ValueConverter.cs @@ -0,0 +1,65 @@ +using System; + +namespace MongoDB.Configuration.Mapping.Util +{ + internal static class ValueConverter + { + public static object Convert(object value, Type type) + { + var valueType = value != null ? 
value.GetType() : typeof(object); + + if(value==null) + return null; + + if(valueType != type) + try + { + var code = System.Convert.GetTypeCode(value); + + if(type.IsEnum) + if(value is string) + value = Enum.Parse(type, (string)value); + else + value = Enum.ToObject(type, value); + else if(type.IsGenericType && + type.GetGenericTypeDefinition() == typeof(Nullable<>)) + value = System.Convert.ChangeType(value, Nullable.GetUnderlyingType(type)); + else if(code != TypeCode.Object) + value = System.Convert.ChangeType(value, type); + else if(valueType==typeof(Binary)&&type==typeof(byte[])) + value = (byte[])(Binary)value; + } + catch(FormatException exception) + { + throw new MongoException("Can not convert value from " + valueType + " to " + type, exception); + } + catch(ArgumentException exception) + { + throw new MongoException("Can not convert value from " + valueType + " to " + type, exception); + } + + return value; + } + + public static Array ConvertArray(object[] elements, Type type) + { + var array = Array.CreateInstance(type, elements.Length); + + for(var i = 0; i < elements.Length; i++) + array.SetValue(Convert(elements[i], type), i); + + return array; + } + + public static string ConvertKey(object key) + { + if(key == null) + throw new ArgumentNullException("key"); + + if(key is Enum) + return System.Convert.ToInt64(key).ToString(); + + return key.ToString(); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/MongoConfiguration.cs b/source/MongoDB/Configuration/MongoConfiguration.cs new file mode 100644 index 00000000..a18d0d6a --- /dev/null +++ b/source/MongoDB/Configuration/MongoConfiguration.cs @@ -0,0 +1,139 @@ +using System; +using MongoDB.Configuration.Mapping; +using MongoDB.Serialization; + +namespace MongoDB.Configuration +{ + /// + /// + public class MongoConfiguration + { + private static MongoConfiguration _default; + + private string _connectionString; + private IMappingStore _mappingStore; + private bool _readLocalTime; + private ISerializationFactory _serializationFactory; + + /// + /// Initializes a new instance of the class. + /// + public MongoConfiguration() + { + IsModifiable = true; + _connectionString = string.Empty; + _mappingStore = new AutoMappingStore(); + _serializationFactory = new SerializationFactory(this); + _readLocalTime = true; + } + + /// + /// Gets the default. + /// + /// The default. + public static MongoConfiguration Default + { + get + { + if(_default == null) + { + var configuration = new MongoConfiguration(); + var section = MongoConfigurationSection.GetSection(); + if(section != null) + section.UpdateConfiguration(configuration); + _default = configuration; + } + + return _default; + } + } + + /// + /// + public bool IsModifiable { get; private set; } + + /// + /// Gets or sets the connection string. + /// + /// The connection string. + public string ConnectionString + { + get { return _connectionString; } + set + { + TryModify(); + _connectionString = value; + } + } + + /// + /// Gets or sets the serialization factory. + /// + /// The serialization factory. + public ISerializationFactory SerializationFactory + { + get { return _serializationFactory; } + set + { + TryModify(); + _serializationFactory = value; + } + } + + /// + /// Gets or sets the mapping store. + /// + /// The mapping store. + public IMappingStore MappingStore + { + get { return _mappingStore; } + set + { + TryModify(); + _mappingStore = value; + } + } + + /// + /// Reads DataTime from server as local time. 
+ /// + /// true if [read local time]; otherwise, false. + /// + /// MongoDB stores all time values in UTC timezone. If true the + /// time is converted from UTC to local timezone after it was read. + /// + public bool ReadLocalTime + { + get { return _readLocalTime; } + set + { + TryModify(); + _readLocalTime = value; + } + } + + /// + /// Ensures the configuration is still modifiable. + /// + protected void TryModify() + { + if(!IsModifiable) + throw new InvalidOperationException("Value can not be modified"); + } + + /// + /// Validates the configuration and seals it. + /// + public virtual void ValidateAndSeal() + { + if(ConnectionString == null) + throw new MongoException("ConnectionString can not be null"); + if(MappingStore == null) + throw new MongoException("MappingStore can not be null"); + if(SerializationFactory == null) + throw new MongoException("SerializationFactory can not be null"); + + IsModifiable = false; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/MongoConfigurationBuilder.cs b/source/MongoDB/Configuration/MongoConfigurationBuilder.cs new file mode 100644 index 00000000..c328dc4a --- /dev/null +++ b/source/MongoDB/Configuration/MongoConfigurationBuilder.cs @@ -0,0 +1,92 @@ +using System; +using System.Configuration; + +using MongoDB.Configuration.Builders; +using MongoDB.Configuration.Mapping; + +namespace MongoDB.Configuration +{ + /// + /// + /// + public class MongoConfigurationBuilder + { + private string _connectionString; + private MappingStoreBuilder _mappingStoreBuilder; + + /// + /// Builds the configuration. + /// + /// + public MongoConfiguration BuildConfiguration() + { + if (_mappingStoreBuilder == null) + return new MongoConfiguration { + ConnectionString = _connectionString + }; + + return new MongoConfiguration { + ConnectionString = _connectionString, + MappingStore = _mappingStoreBuilder.BuildMappingStore() + }; + } + + /// + /// Builds the mapping store. + /// + public IMappingStore BuildMappingStore() + { + if (_mappingStoreBuilder == null) + return new AutoMappingStore(); + + return _mappingStoreBuilder.BuildMappingStore(); + } + + /// + /// Sets the connection string. + /// + /// The connection string. + public void ConnectionString(string connectionString) + { + _connectionString = connectionString; + } + + /// + /// Builds the connection string. + /// + /// The config. + public void ConnectionString(Action<MongoConnectionStringBuilder> config) + { + if (config == null) + throw new ArgumentNullException("config"); + + var builder = new MongoConnectionStringBuilder(); + config(builder); + _connectionString = builder.ToString(); + } + + /// + /// Sets the app settings key from which to pull the connection string. + /// + /// The key. + public void ReadConnectionStringFromAppSettings(string key) + { + _connectionString = ConfigurationManager.AppSettings[key]; + } + + /// + /// Configures the mapping. + /// + /// The config. 
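// Usage sketch (illustrative annotation, not patch content): how the builder above is typically
// combined with MongoConfiguration. BuildConfiguration, the ConnectionString overloads and
// ValidateAndSeal are shown in this diff; the ConnectionTimeout property on
// MongoConnectionStringBuilder is an assumption taken from its use in ConnectionFactoryBase further down.
//
//     var configurationBuilder = new MongoConfigurationBuilder();
//     configurationBuilder.ConnectionString(cs => cs.ConnectionTimeout = TimeSpan.FromSeconds(30));
//     var configuration = configurationBuilder.BuildConfiguration();
//     configuration.ValidateAndSeal();   // seals the instance; any later property setter throws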
+ public void Mapping(Action config) + { + if (config == null) + throw new ArgumentNullException("config"); + + if (_mappingStoreBuilder == null) + _mappingStoreBuilder = new MappingStoreBuilder(); + + config(_mappingStoreBuilder); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Configuration/MongoConfigurationSection.cs b/source/MongoDB/Configuration/MongoConfigurationSection.cs new file mode 100644 index 00000000..b8e8281a --- /dev/null +++ b/source/MongoDB/Configuration/MongoConfigurationSection.cs @@ -0,0 +1,96 @@ +using System; +using System.Configuration; +using System.Linq; +using MongoDB.Configuration.Section; + +namespace MongoDB.Configuration +{ + /// + /// + /// + public class MongoConfigurationSection : ConfigurationSection + { + /// + /// + /// + public const string DefaultSectionName = "mongo"; + + /// + /// Gets the connections. + /// + /// The connections. + [ConfigurationProperty("connections", IsDefaultCollection = false)] + [ConfigurationCollection(typeof(ConnectionCollection), + AddItemName = "add", + ClearItemsName = "clear", + RemoveItemName = "remove")] + public ConnectionCollection Connections{ + get{return (ConnectionCollection)this["connections"];} + } + + /// + /// Reads DataTime from server as local time. + /// + /// true if [read local time]; otherwise, false. + /// + /// MongoDB stores all time values in UTC timezone. If true the + /// time is converted from UTC to local timezone after is was read. + /// + [ConfigurationProperty("readLocalTime", DefaultValue = true)] + public bool ReadLocalTime + { + get{return (bool)this["readLocalTime"];} + } + + /// + /// Gets the section with name Mongo. + /// + /// + public static MongoConfigurationSection GetSection() + { + return GetSection(DefaultSectionName); + } + + /// + /// Gets the section. + /// + /// The name. + /// + public static MongoConfigurationSection GetSection(string name) + { + return ConfigurationManager.GetSection(name) as MongoConfigurationSection; + } + + /// + /// Creates the configuration. + /// + /// + public MongoConfiguration CreateConfiguration() + { + var configuration = new MongoConfiguration(); + + UpdateConfiguration(configuration); + + return configuration; + } + + /// + /// Updates the configuration. + /// + /// The configuration. + public void UpdateConfiguration(MongoConfiguration configuration) + { + if(configuration == null) + throw new ArgumentNullException("configuration"); + + if(Connections!=null) + { + var connection = Connections.Cast().FirstOrDefault(c=>c.IsDefault); + if(connection != null) + configuration.ConnectionString = connection.ConnectionString; + } + + configuration.ReadLocalTime = ReadLocalTime; + } + } +} diff --git a/source/MongoDB/Configuration/Section/ConnectionCollection.cs b/source/MongoDB/Configuration/Section/ConnectionCollection.cs new file mode 100644 index 00000000..44c89b0b --- /dev/null +++ b/source/MongoDB/Configuration/Section/ConnectionCollection.cs @@ -0,0 +1,131 @@ +using System; +using System.Configuration; + +namespace MongoDB.Configuration.Section +{ + /// + /// + /// + public class ConnectionCollection : ConfigurationElementCollection + { + /// + /// Gets the type of the . + /// + /// + /// + /// The of this collection. + /// + public override ConfigurationElementCollectionType CollectionType { + get { return ConfigurationElementCollectionType.AddRemoveClearMap; } + } + + /// + /// When overridden in a derived class, creates a new . + /// + /// + /// A new . 
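// Usage sketch (illustrative annotation): consuming the configuration section declared above.
// The expected App.config shape is a <mongo> section holding a <connections> collection of
// <add key="..." connectionString="..."/> elements plus an optional readLocalTime attribute on the
// section; an entry whose key is empty or ends with "default" supplies the default connection string.
//
//     var section = MongoConfigurationSection.GetSection();   // reads the "mongo" section
//     var configuration = section.CreateConfiguration();      // applies the default connection
//     var connectionString = configuration.ConnectionString;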
+ /// + protected override ConfigurationElement CreateNewElement () + { + return new ConnectionElement (); + } + + /// + /// Gets the element key for a specified configuration element when overridden in a derived class. + /// + /// The to return the key for. + /// + /// An that acts as the key for the specified . + /// + protected override Object GetElementKey (ConfigurationElement element) + { + return ((ConnectionElement)element).Name; + } + + /// + /// Gets or sets the at the specified index. + /// + /// + public ConnectionElement this[int index] { + get { return (ConnectionElement)BaseGet (index); } + set { + if (BaseGet (index) != null) { + BaseRemoveAt (index); + } + BaseAdd (index, value); + } + } + + /// + /// Gets the with the specified name. + /// + /// + public new ConnectionElement this[string Name] { + get { return (ConnectionElement)BaseGet (Name); } + } + + /// + /// Indexes the of. + /// + /// The connection. + /// + public int IndexOf (ConnectionElement connection) + { + return BaseIndexOf (connection); + } + + /// + /// Adds the specified connection. + /// + /// The connection. + public void Add (ConnectionElement connection) + { + BaseAdd (connection); + } + + /// + /// Adds a configuration element to the . + /// + /// The to add. + protected override void BaseAdd (ConfigurationElement element) + { + BaseAdd (element, false); + } + + /// + /// Removes the specified connection. + /// + /// The connection. + public void Remove (ConnectionElement connection) + { + if (BaseIndexOf (connection) >= 0) + BaseRemove (connection.Name); + } + + /// + /// Removes at. + /// + /// The index. + public void RemoveAt (int index) + { + BaseRemoveAt (index); + } + + /// + /// Removes the specified name. + /// + /// The name. + public void Remove (string name) + { + BaseRemove (name); + } + + /// + /// Clears this instance. + /// + public void Clear () + { + BaseClear (); + } + } +} diff --git a/source/MongoDB/Configuration/Section/ConnectionElement.cs b/source/MongoDB/Configuration/Section/ConnectionElement.cs new file mode 100644 index 00000000..f2e324fb --- /dev/null +++ b/source/MongoDB/Configuration/Section/ConnectionElement.cs @@ -0,0 +1,43 @@ +using System; +using System.Configuration; + +namespace MongoDB.Configuration.Section +{ + + /// + /// + /// + public class ConnectionElement : ConfigurationElement + { + /// + /// Gets or sets the name. + /// + /// The name. + [ConfigurationProperty("key", IsRequired = true)] + public string Name{ + get{return (String)this["key"];} + set{this["key"] = value;} + } + + /// + /// Gets or sets the connection string. + /// + /// The connection string. + [ConfigurationProperty("connectionString", DefaultValue = "Server=localhost:27017")] + public string ConnectionString{ + get { return (String)this["connectionString"]; } + set { this["connectionString"] = value; } + } + + /// + /// Gets a value indicating whether this instance is default. + /// + /// + /// true if this instance is default; otherwise, false. 
+ /// + public bool IsDefault + { + get { return string.IsNullOrEmpty(Name) || Name.EndsWith("default", StringComparison.InvariantCultureIgnoreCase); } + } + } +} diff --git a/source/MongoDB/Connections/Connection.cs b/source/MongoDB/Connections/Connection.cs new file mode 100644 index 00000000..c0d80a07 --- /dev/null +++ b/source/MongoDB/Connections/Connection.cs @@ -0,0 +1,388 @@ +using System; +using System.IO; +using System.Linq; +using MongoDB.Bson; +using MongoDB.Configuration; +using MongoDB.Protocol; +using MongoDB.Results; +using MongoDB.Serialization; +using MongoDB.Util; + +namespace MongoDB.Connections +{ + /// + /// Connection is a management unit which uses a RawConnection from the connection pool + /// to communicate with the server. + /// + /// If a connection error occurs, the RawConnection is transparently replaced + /// by a fresh connection. + /// + /// + internal class Connection : IDisposable + { + private readonly IConnectionFactory _factory; + private RawConnection _connection; + private bool _disposed; + + /// + /// Initializes a new instance of the class. + /// + /// The factory. + public Connection(IConnectionFactory factory) + { + if (factory == null) + throw new ArgumentNullException ("factory"); + + _factory = factory; + } + + /// + /// Releases unmanaged resources and performs other cleanup operations before the + /// is reclaimed by garbage collection. + /// + ~Connection (){ + // make sure the connection returns to the pool if the user forgets it. + Dispose (false); + } + + /// + /// Gets the connection string. + /// + /// The connection string. + public string ConnectionString { + get { return _factory.ConnectionString; } + } + + /// + /// Gets the end point. + /// + /// The end point. + public MongoServerEndPoint EndPoint{ + get { return _connection == null ? null : _connection.EndPoint; } + } + + /// + /// Sends the two way message. + /// + /// The message. + /// The database. + /// + public ReplyMessage SendTwoWayMessage(IRequestMessage message, string database){ + return SendTwoWayMessage(message,new BsonReaderSettings(), database); + } + + /// + /// Used for sending a message that gets a reply such as a query. + /// + /// + /// The message. + /// The reader settings. + /// The database. + /// + /// A reconnect will be issued but it is up to the caller to handle the error. + public ReplyMessage SendTwoWayMessage(IRequestMessage message, BsonReaderSettings readerSettings, string database) where T:class { + AuthenticateIfRequired(database); + + return SendTwoWayMessageCore(message, readerSettings); + } + + /// + /// Sends the two way message core. + /// + /// + /// The message. + /// The reader settings. + /// + internal ReplyMessage SendTwoWayMessageCore(IRequestMessage message, BsonReaderSettings readerSettings) where T : class + { + EnsureOpenConnection(); + + try + { + var reply = new ReplyMessage(readerSettings); + lock(_connection) + { + message.Write(_connection.GetStream()); + reply.Read(_connection.GetStream()); + } + return reply; + } + catch(IOException) + { + ReplaceInvalidConnection(); + throw; + } + } + + /// + /// Used for sending a message that gets no reply such as insert or update. + /// + /// The message. + /// The database. + /// A reconnect will be issued but it is up to the caller to handle the error. + public void SendMessage(IRequestMessage message, string database){ + AuthenticateIfRequired(database); + + SendMessageCore(message); + } + + /// + /// Sends the message core. + /// + /// The message. 
+ internal void SendMessageCore(IRequestMessage message) + { + EnsureOpenConnection(); + + try + { + lock(_connection) + { + message.Write(_connection.GetStream()); + } + } + catch(IOException) + { + //Sending doesn't seem to always trigger the detection of a closed socket. + ReplaceInvalidConnection(); + throw; + } + } + + /// + /// Gets a value indicating whether this instance is connected. + /// + /// + /// true if this instance is connected; otherwise, false. + /// + public bool IsConnected + { + get { return _connection != null && _connection.IsConnected; } + } + + /// + /// Just sends a simple message string to the database. + /// + /// + /// A + /// + public void SendMsgMessage (String message){ + SendMessageCore(new MsgMessage{Message = message}); + } + + /// + /// Opens this instance. + /// + public void Open (){ + _connection = _factory.Open(); + } + + /// + /// Closes this instance. + /// + public void Close (){ + if (_connection == null) + return; + + _factory.Close(_connection); + _connection = null; + } + + /// + /// Replaces the invalid connection. + /// + private void ReplaceInvalidConnection (){ + if (_connection == null) + return; + + _connection.MarkAsInvalid (); + _factory.Close (_connection); + _connection = _factory.Open(); + } + + /// + /// Gets the stream. + /// + /// + internal Stream GetStream (){ + return _connection.GetStream (); + } + + /// + /// Sends the command. + /// + /// The factory. + /// The database. + /// Type of the command. + /// The command. + /// + public Document SendCommand(ISerializationFactory factory, string database, Type rootType, Document command) + { + AuthenticateIfRequired(database); + + var result = SendCommandCore(factory, database, rootType, command); + + if(!Convert.ToBoolean(result["ok"])) + { + var msg = string.Empty; + if(result.ContainsKey("msg")) + msg = (string)result["msg"]; + else if(result.ContainsKey("errmsg")) + msg = (string)result["errmsg"]; + throw new MongoCommandException(msg, result, command); + } + + return result; + } + + /// + /// Sends the command. + /// + /// + /// The factory. + /// The database. + /// Type of serialization root. + /// The spec. + /// + public T SendCommand(ISerializationFactory factory, string database, Type rootType, object command) + where T : CommandResultBase + { + AuthenticateIfRequired(database); + + var result = SendCommandCore(factory, database, rootType, command); + + if(!result.Success) + throw new MongoCommandException(result.ErrorMessage, null, null); + + return result; + } + + /// + /// Sends the command core. + /// + /// + /// The factory. + /// The database. + /// Type of serialization root. + /// The spec. + /// + private T SendCommandCore(ISerializationFactory factory, string database, Type rootType, object command) + where T : class + { + var writerSettings = factory.GetBsonWriterSettings(rootType); + + var query = new QueryMessage(writerSettings) + { + FullCollectionName = database + ".$cmd", + NumberToReturn = -1, + Query = command + }; + + var readerSettings = factory.GetBsonReaderSettings(typeof(T)); + + try + { + var reply = SendTwoWayMessageCore(query, readerSettings); + + if(reply.CursorId > 0) + SendMessage(new KillCursorsMessage(reply.CursorId),database); + + return reply.Documents.FirstOrDefault(); + } + catch(IOException exception) + { + throw new MongoConnectionException("Could not read data, communication failure", this, exception); + } + } + + /// + /// Authenticates the on first request. + /// + /// Name of the database. 
+ private void AuthenticateIfRequired(string databaseName) + { + if(databaseName == null) + throw new ArgumentNullException("databaseName"); + EnsureOpenConnection(); + + if(_connection.IsAuthenticated(databaseName)) + return; + + var builder = new MongoConnectionStringBuilder(ConnectionString); + + if(string.IsNullOrEmpty(builder.Username)) + return; + + var serializationFactory = MongoConfiguration.Default.SerializationFactory; + + var document = new Document().Add("getnonce", 1.0); + var nonceResult = SendCommandCore(serializationFactory, databaseName, typeof(Document), document); + var nonce = (string)nonceResult["nonce"]; + + if(nonce == null) + throw new MongoException("Error retrieving nonce", null); + + var pwd = MongoHash.Generate(builder.Username + ":mongo:" + builder.Password); + var auth = new Document{ + {"authenticate", 1.0}, + {"user", builder.Username}, + {"nonce", nonce}, + {"key", MongoHash.Generate(nonce + builder.Username + pwd)} + }; + try + { + var result = SendCommandCore(serializationFactory, databaseName, typeof(Document), auth); + + if(!Convert.ToBoolean(result["ok"])) + throw new MongoException("Authentication failed for " + builder.Username); + } + catch(MongoCommandException exception) + { + //Todo: use custom exception? + throw new MongoException("Authentication failed for " + builder.Username, exception); + } + + _connection.MarkAuthenticated(databaseName); + } + + /// + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + /// + public void Dispose (){ + Dispose(true); + GC.SuppressFinalize(this); + } + + /// + /// Releases unmanaged and - optionally - managed resources + /// + /// true to release both managed and unmanaged resources; false to release only unmanaged resources. + protected virtual void Dispose(bool disposing) + { + if(_disposed) + return; + + if (disposing) + { + // Cleanup Managed Resources Here + Close(); + } + + // Cleanup Unmanaged Resources Here + + // Then mark object as disposed + _disposed = true; + } + + /// + /// Ensures the open connection. 
+ /// + private void EnsureOpenConnection() + { + if(IsConnected) + return; + + throw new MongoConnectionException("Operation cannot be performed on a closed connection.", ConnectionString, null); + } + } +} diff --git a/MongoDBDriver/Connections/ConnectionFactoryBase.cs b/source/MongoDB/Connections/ConnectionFactoryBase.cs similarity index 81% rename from MongoDBDriver/Connections/ConnectionFactoryBase.cs rename to source/MongoDB/Connections/ConnectionFactoryBase.cs index 6efe0051..1a479fd4 100644 --- a/MongoDBDriver/Connections/ConnectionFactoryBase.cs +++ b/source/MongoDB/Connections/ConnectionFactoryBase.cs @@ -1,8 +1,12 @@ using System; +using System.Net.Sockets; -namespace MongoDB.Driver.Connections +namespace MongoDB.Connections { - public abstract class ConnectionFactoryBase : IConnectionFactory + /// + /// + /// + internal abstract class ConnectionFactoryBase : IConnectionFactory { private int _endPointPointer; @@ -55,7 +59,12 @@ public virtual void Cleanup(){ protected RawConnection CreateRawConnection() { var endPoint = GetNextEndPoint(); - return new RawConnection(endPoint, Builder.ConnectionTimeout); + try + { + return new RawConnection(endPoint, Builder.ConnectionTimeout); + }catch(SocketException exception){ + throw new MongoConnectionException("Failed to connect to server " + endPoint, ConnectionString, endPoint, exception); + } } /// diff --git a/MongoDBDriver/Connections/ConnectionFactory.cs b/source/MongoDB/Connections/ConnectionFactoryFactory.cs similarity index 89% rename from MongoDBDriver/Connections/ConnectionFactory.cs rename to source/MongoDB/Connections/ConnectionFactoryFactory.cs index e85c1f7b..a8b64780 100644 --- a/MongoDBDriver/Connections/ConnectionFactory.cs +++ b/source/MongoDB/Connections/ConnectionFactoryFactory.cs @@ -2,9 +2,12 @@ using System.Collections.Generic; using System.Threading; -namespace MongoDB.Driver.Connections +namespace MongoDB.Connections { - public static class ConnectionFactory + /// + /// + /// + internal static class ConnectionFactoryFactory { private static readonly TimeSpan MaintenaceWakeup = TimeSpan.FromSeconds(10); private static readonly Timer MaintenanceTimer = new Timer(o => OnMaintenaceWakeup()); @@ -12,9 +15,9 @@ public static class ConnectionFactory private static readonly object SyncObject = new object(); /// - /// Initializes the class. + /// Initializes the class. /// - static ConnectionFactory() + static ConnectionFactoryFactory() { MaintenanceTimer.Change(MaintenaceWakeup, MaintenaceWakeup); } diff --git a/MongoDBDriver/Connections/IConnectionFactory.cs b/source/MongoDB/Connections/IConnectionFactory.cs similarity index 79% rename from MongoDBDriver/Connections/IConnectionFactory.cs rename to source/MongoDB/Connections/IConnectionFactory.cs index 95c59423..f36c066b 100644 --- a/MongoDBDriver/Connections/IConnectionFactory.cs +++ b/source/MongoDB/Connections/IConnectionFactory.cs @@ -1,8 +1,11 @@ using System; -namespace MongoDB.Driver.Connections +namespace MongoDB.Connections { - public interface IConnectionFactory : IDisposable + /// + /// + /// + internal interface IConnectionFactory : IDisposable { /// /// Opens a connection. 
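For reference, the authentication handshake implemented in Connection.AuthenticateIfRequired above reduces to two MD5 digests sent with the authenticate command. The sketch below is a minimal stand-alone illustration under the assumption that MongoHash.Generate produces a lowercase hex MD5 of its UTF-8 input; that helper is not part of this diff, so Md5Hex stands in for it here.

using System.Security.Cryptography;
using System.Text;

internal static class AuthenticationSketch
{
    // Stand-in for MongoHash.Generate (assumed: lowercase hex MD5 of the UTF-8 input).
    private static string Md5Hex(string text)
    {
        using (var md5 = MD5.Create())
        {
            var hash = md5.ComputeHash(Encoding.UTF8.GetBytes(text));
            var hex = new StringBuilder(hash.Length * 2);
            foreach (var b in hash)
                hex.Append(b.ToString("x2"));
            return hex.ToString();
        }
    }

    // "nonce" is taken from the server's reply to the { getnonce : 1.0 } command.
    internal static string ComputeAuthenticationKey(string username, string password, string nonce)
    {
        var pwd = Md5Hex(username + ":mongo:" + password);  // credential digest
        return Md5Hex(nonce + username + pwd);              // "key" sent with { authenticate : 1.0 }
    }
}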
diff --git a/MongoDBDriver/Connections/PooledConnectionFactory.cs b/source/MongoDB/Connections/PooledConnectionFactory.cs similarity index 93% rename from MongoDBDriver/Connections/PooledConnectionFactory.cs rename to source/MongoDB/Connections/PooledConnectionFactory.cs index 639641e8..248826b6 100644 --- a/MongoDBDriver/Connections/PooledConnectionFactory.cs +++ b/source/MongoDB/Connections/PooledConnectionFactory.cs @@ -2,13 +2,13 @@ using System.Collections.Generic; using System.Threading; -namespace MongoDB.Driver.Connections +namespace MongoDB.Connections { /// /// Connection pool implementation based on this document: /// http://msdn.microsoft.com/en-us/library/8xx3tyca%28VS.100%29.aspx /// - public class PooledConnectionFactory : ConnectionFactoryBase + internal class PooledConnectionFactory : ConnectionFactoryBase { private readonly object _syncObject = new object(); private readonly Queue _freeConnections = new Queue(); @@ -112,14 +112,17 @@ private bool IsAlive(RawConnection connection) if(connection == null) throw new ArgumentNullException("connection"); - if(Builder.ConnectionLifetime!=TimeSpan.Zero) - if(connection.CreationTime.Add(Builder.ConnectionLifetime) < DateTime.Now) - return false; - if(!connection.IsConnected) return false; - return !connection.IsInvalid; + if(connection.IsInvalid) + return false; + + if(Builder.ConnectionLifetime != TimeSpan.Zero) + if(connection.CreationTime.Add(Builder.ConnectionLifetime) < DateTime.Now) + return false; + + return true; } /// diff --git a/MongoDBDriver/Connections/RawConnection.cs b/source/MongoDB/Connections/RawConnection.cs similarity index 74% rename from MongoDBDriver/Connections/RawConnection.cs rename to source/MongoDB/Connections/RawConnection.cs index fea6d52b..c31d93d2 100644 --- a/MongoDBDriver/Connections/RawConnection.cs +++ b/source/MongoDB/Connections/RawConnection.cs @@ -1,15 +1,17 @@ using System; +using System.Collections.Generic; using System.Net.Sockets; -namespace MongoDB.Driver.Connections +namespace MongoDB.Connections { /// /// Represents a raw connection on the wire which is managed by the /// connection pool. /// - public class RawConnection : IDisposable + internal class RawConnection : IDisposable { private readonly TcpClient _client = new TcpClient(); + private readonly List _authenticatedDatabases = new List(); private bool _isDisposed; /// @@ -23,11 +25,13 @@ public RawConnection(MongoServerEndPoint endPoint,TimeSpan connectionTimeout) throw new ArgumentNullException("endPoint"); EndPoint = endPoint; - CreationTime = DateTime.Now; + CreationTime = DateTime.UtcNow; _client.NoDelay = true; _client.ReceiveTimeout = (int)connectionTimeout.TotalMilliseconds; _client.SendTimeout = (int)connectionTimeout.TotalMilliseconds; + + //Todo: custom exception? _client.Connect(EndPoint.Host, EndPoint.Port); } @@ -82,19 +86,27 @@ public bool IsConnected public MongoServerEndPoint EndPoint { get; private set; } /// - /// Gets or sets a value indicating whether this instance is authenticated. + /// Determines whether the specified database name is authenticated. /// - /// - /// true if this instance is authenticated; otherwise, false. - /// - public bool IsAuthenticated { get; private set; } + /// Name of the database. + /// + /// true if the specified database name is authenticated; otherwise, false. 
+ /// + public bool IsAuthenticated(string databaseName){ + if(databaseName == null) + throw new ArgumentNullException("databaseName"); + + return _authenticatedDatabases.Contains(databaseName); + } /// /// Marks as authenticated. /// - public void MarkAuthenticated() - { - IsAuthenticated = true; + public void MarkAuthenticated(string databaseName){ + if(databaseName == null) + throw new ArgumentNullException("databaseName"); + + _authenticatedDatabases.Add(databaseName); } /// diff --git a/MongoDBDriver/Connections/SimpleConnectionFactory.cs b/source/MongoDB/Connections/SimpleConnectionFactory.cs similarity index 88% rename from MongoDBDriver/Connections/SimpleConnectionFactory.cs rename to source/MongoDB/Connections/SimpleConnectionFactory.cs index 258b8b01..a84d5655 100644 --- a/MongoDBDriver/Connections/SimpleConnectionFactory.cs +++ b/source/MongoDB/Connections/SimpleConnectionFactory.cs @@ -1,11 +1,11 @@ using System; -namespace MongoDB.Driver.Connections +namespace MongoDB.Connections { /// /// Simple connection factory which only creates and closes connections. /// - public class SimpleConnectionFactory : ConnectionFactoryBase + internal class SimpleConnectionFactory : ConnectionFactoryBase { /// /// Initializes a new instance of the class. diff --git a/source/MongoDB/Cursor_1.cs b/source/MongoDB/Cursor_1.cs new file mode 100644 index 00000000..dc2e0dd3 --- /dev/null +++ b/source/MongoDB/Cursor_1.cs @@ -0,0 +1,448 @@ +using System; +using System.Collections.Generic; +using System.IO; +using MongoDB.Connections; +using MongoDB.Protocol; +using MongoDB.Serialization; +using System.Linq; +using MongoDB.Util; +using MongoDB.Configuration.Mapping; + +namespace MongoDB +{ + /// + /// + /// + /// + public class Cursor : ICursor where T : class + { + private readonly Connection _connection; + private readonly string _databaseName; + private readonly Document _specOpts = new Document(); + private object _spec; + private object _fields; + private int _limit; + private QueryOptions _options; + private ReplyMessage _reply; + private int _skip; + private bool _keepCursor; + private readonly ISerializationFactory _serializationFactory; + private readonly IMappingStore _mappingStore; + + /// + /// Initializes a new instance of the class. + /// + /// The serialization factory. + /// The mapping store. + /// The conn. + /// Name of the database. + /// Name of the collection. + internal Cursor(ISerializationFactory serializationFactory, IMappingStore mappingStore, Connection connection, string databaseName, string collectionName) + { + //Todo: add public constrcutor for users to call + IsModifiable = true; + _connection = connection; + _databaseName = databaseName; + FullCollectionName = databaseName + "." + collectionName; + _serializationFactory = serializationFactory; + _mappingStore = mappingStore; + } + + /// + /// Initializes a new instance of the class. + /// + /// The serialization factory. + /// The mapping store. + /// The conn. + /// Name of the database. + /// Name of the collection. + /// The spec. + /// The limit. + /// The skip. + /// The fields. 
+ internal Cursor(ISerializationFactory serializationFactory, IMappingStore mappingStore, Connection connection, string databaseName, string collectionName, object spec, int limit, int skip, object fields) + : this(serializationFactory, mappingStore, connection, databaseName, collectionName) + { + //Todo: add public constrcutor for users to call + if (spec == null) + spec = new Document(); + _spec = spec; + _limit = limit; + _skip = skip; + _fields = fields; + } + + /// + /// Releases unmanaged resources and performs other cleanup operations before the + /// is reclaimed by garbage collection. + /// + ~Cursor(){ + Dispose(false); + } + + /// + /// Gets or sets the full name of the collection. + /// + /// The full name of the collection. + public string FullCollectionName { get; private set; } + + /// + /// Gets or sets the id. + /// + /// The id. + public long Id { get; private set; } + + /// + /// Specs the specified spec. + /// + /// The spec. + /// + public ICursor Spec(object spec){ + TryModify(); + _spec = spec; + return this; + } + + /// + /// Limits the specified limit. + /// + /// The limit. + /// + public ICursor Limit(int limit){ + TryModify(); + _limit = limit; + return this; + } + + /// + /// Skips the specified skip. + /// + /// The skip. + /// + public ICursor Skip(int skip){ + TryModify(); + _skip = skip; + return this; + } + + /// + /// Fieldses the specified fields. + /// + /// The fields. + /// + public ICursor Fields(object fields){ + TryModify(); + _fields = fields; + return this; + } + + /// + /// Sorts the specified field. + /// + /// The field. + /// + public ICursor Sort(string field){ + return Sort(field, IndexOrder.Ascending); + } + + /// + /// Sorts the specified field. + /// + /// The field. + /// The order. + /// + public ICursor Sort(string field, IndexOrder order){ + return Sort(new Document().Add(field, order)); + } + + /// + /// Sorts the specified fields. + /// + /// The fields. + /// + public ICursor Sort(object fields){ + TryModify(); + AddOrRemoveSpecOpt("$orderby", fields); + return this; + } + + /// + /// Hints the specified index. + /// + /// The index. + /// + public ICursor Hint(object index){ + TryModify(); + AddOrRemoveSpecOpt("$hint", index); + return this; + } + + /// + /// Keeps the cursor open. + /// + /// if set to true [value]. + /// + /// + /// By default cursors are closed automaticly after documents + /// are Enumerated. + /// + public ICursor KeepCursor(bool value) + { + _keepCursor = value; + return this; + } + + /// + /// Snapshots the specified index. + /// + public ICursor Snapshot(){ + TryModify(); + AddOrRemoveSpecOpt("$snapshot", true); + return this; + } + + /// + /// Explains this instance. + /// + /// + public Document Explain(){ + TryModify(); + _specOpts["$explain"] = true; + + var explainResult = RetrieveData(); + try + { + var explain = explainResult.Documents.FirstOrDefault(); + + if(explain==null) + throw new InvalidOperationException("Explain failed. No documents where returned."); + + return explain; + } + finally + { + if(explainResult.CursorId > 0) + KillCursor(explainResult.CursorId); + } + } + + /// + /// Gets a value indicating whether this is modifiable. + /// + /// true if modifiable; otherwise, false. + public bool IsModifiable { get; private set; } + + /// + /// Gets the documents. + /// + /// The documents. 
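// Usage sketch (illustrative annotation): the fluent API defined by this cursor. The collection
// variable and field names are assumptions; FindAll, Spec, Sort, Skip, Limit, Documents and the
// using/Dispose pattern are taken from the members shown in this diff.
//
//     using (var cursor = collection.FindAll()
//         .Spec(new Document("Age", new Document("$gt", 21)))
//         .Sort("Name", IndexOrder.Ascending)
//         .Skip(10)
//         .Limit(20))
//     {
//         foreach (var document in cursor.Documents)
//             Console.WriteLine(document["Name"]);
//     }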
+ public IEnumerable Documents { + get { + do + { + _reply = RetrieveData(); + + if(_reply == null) + throw new InvalidOperationException("Expecting reply but get null"); + + foreach(var document in _reply.Documents) + yield return document; + } + while(Id > 0 && _limit + /// Gets the cursor position. + /// + /// The cursor position. + public int CursorPosition + { + get + { + if(_reply == null) + return 0; + + return _reply.StartingFrom + _reply.NumberReturned; + } + } + + /// + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + /// + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + /// + /// Releases unmanaged and - optionally - managed resources + /// + /// true to release both managed and unmanaged resources; false to release only unmanaged resources. + protected virtual void Dispose(bool disposing) + { + if(Id == 0 || !_connection.IsConnected) //All server side resources disposed of. + return; + + KillCursor(Id); + } + + /// + /// Optionses the specified options. + /// + /// The options. + /// + public ICursor Options(QueryOptions options){ + TryModify(); + _options = options; + return this; + } + + /// + /// Kills the cursor. + /// + private void KillCursor(long cursorId) + { + var killCursorsMessage = new KillCursorsMessage(cursorId); + + try { + _connection.SendMessage(killCursorsMessage,_databaseName); + Id = 0; + } catch (IOException exception) { + throw new MongoConnectionException("Could not read data, communication failure", _connection, exception); + } + } + + /// + /// Retrieves the data. + /// + /// The type of the reply. + /// + private ReplyMessage RetrieveData() where TReply : class + { + IsModifiable = false; + + IRequestMessage message; + + if(Id <= 0) + { + var writerSettings = _serializationFactory.GetBsonWriterSettings(typeof(T)); + + message = new QueryMessage(writerSettings) + { + FullCollectionName = FullCollectionName, + Query = BuildSpec(), + NumberToReturn = _limit, + NumberToSkip = _skip, + Options = _options, + ReturnFieldSelector = ConvertFieldSelectorToDocument(_fields) + }; + } + else + { + message = new GetMoreMessage(FullCollectionName, Id, _limit); + } + + var readerSettings = _serializationFactory.GetBsonReaderSettings(typeof(T)); + + try + { + + var reply = _connection.SendTwoWayMessage(message, readerSettings, _databaseName); + + Id = reply.CursorId; + + return reply; + } + catch(IOException exception) + { + throw new MongoConnectionException("Could not read data, communication failure", _connection, exception); + } + } + + /// + /// Tries the modify. + /// + private void TryModify(){ + if(!IsModifiable) + throw new InvalidOperationException("Cannot modify a cursor that has already returned documents."); + } + + /// + /// Adds the or remove spec opt. + /// + /// The key. + /// The doc. + private void AddOrRemoveSpecOpt(string key, object doc){ + if (doc == null) + _specOpts.Remove(key); + else + _specOpts[key] = doc; + } + + /// + /// Builds the spec. + /// + /// + private object BuildSpec(){ + if (_specOpts.Count == 0) + return _spec; + + var document = new Document(); + _specOpts.CopyTo(document); + document["$query"] = _spec; + return document; + } + + private Document ConvertFieldSelectorToDocument(object document) + { + Document doc; + if (document == null) + doc = new Document(); + else + doc = ConvertExampleToDocument(document) as Document; + + if (doc == null) + throw new NotSupportedException("An entity type is not supported in field selection. 
Use either a document or an anonymous type."); + + var classMap = _mappingStore.GetClassMap(typeof(T)); + if (doc.Count > 0 && (classMap.IsPolymorphic || classMap.IsSubClass)) + doc[classMap.DiscriminatorAlias] = true; + + return doc.Count == 0 ? null : doc; + } + + private object ConvertExampleToDocument(object document) + { + if (document == null) + return null; + + Document doc = document as Document; + if (doc != null) + return doc; + + doc = new Document(); + + if (!(document is T)) //some type that is being used as an example + { + foreach (var prop in document.GetType().GetProperties()) + { + if (!prop.CanRead) + continue; + + object value = prop.GetValue(document, null); + if (!TypeHelper.IsNativeToMongo(prop.PropertyType)) + value = ConvertExampleToDocument(value); + + doc[prop.Name] = value; + } + } + + return doc; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/DBRef.cs b/source/MongoDB/DBRef.cs new file mode 100644 index 00000000..8ed389d9 --- /dev/null +++ b/source/MongoDB/DBRef.cs @@ -0,0 +1,227 @@ +using System; + +namespace MongoDB +{ + /// + /// Native type that maps to a database reference. Use Database.FollowReference(DBRef) to retrieve the document + /// that it refers to. + /// + /// + /// DBRefs are just a specification for a specially formatted Document. At this time the database + /// does no special handling of them. Any referential integrity must be maintained by the application + /// not the database. + /// + [Serializable] + public sealed class DBRef : IEquatable + { + internal const string IdName = "$id"; + internal const string MetaName = "metadata"; + internal const string RefName = "$ref"; + + private readonly Document _document; + private string _collectionName; + private object _id; + private Document _metadata; + + /// + /// Initializes a new instance of the class. + /// + public DBRef(){ + _document = new Document(); + } + + /// + /// Constructs a DBRef from a document that matches the DBref specification. + /// + public DBRef(Document document){ + if(document == null) + throw new ArgumentNullException("document"); + if(IsDocumentDBRef(document) == false) + throw new ArgumentException("Document is not a valid DBRef"); + + _collectionName = (String)document[RefName]; + _id = document[IdName]; + _document = document; + if(document.ContainsKey("metadata")) + MetaData = (Document)document["metadata"]; + } + + /// + /// Initializes a new instance of the class. + /// + /// The database reference. + public DBRef(DBRef databaseReference){ + if(databaseReference == null) + throw new ArgumentNullException("databaseReference"); + + _document = new Document(); + CollectionName = databaseReference.CollectionName; + Id = databaseReference.Id; + if(databaseReference.MetaData != null) + MetaData = new Document().Merge(databaseReference.MetaData); + } + + /// + /// Initializes a new instance of the class. + /// + /// Name of the collection. + /// The id. + public DBRef(string collectionName, object id){ + if(collectionName == null) + throw new ArgumentNullException("collectionName"); + if(id == null) + throw new ArgumentNullException("id"); + + _document = new Document(); + CollectionName = collectionName; + Id = id; + } + + /// + /// The name of the collection the referenced document is in. + /// + public string CollectionName{ + get { return _collectionName; } + set{ + _collectionName = value; + _document[RefName] = value; + } + } + + /// + /// Object value of the id. It isn't an Oid because document ids are not required to be oids. 
+ /// + public object Id{ + get { return _id; } + set{ + _id = value; + _document[IdName] = value; + } + } + + /// + /// An extension to the spec that allows storing of arbitrary data about a reference. + /// + /// The meta data. + /// + /// This is a non-standard feature. + /// + public Document MetaData{ + get { return _metadata; } + set{ + _metadata = value; + _document[MetaName] = value; + } + } + + /// + /// Determines whether the specified is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + /// + /// The parameter is null. + /// + public override bool Equals(object obj){ + if(ReferenceEquals(null, obj)) + return false; + if(ReferenceEquals(this, obj)) + return true; + return obj.GetType() == typeof(DBRef) && Equals((DBRef)obj); + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode(){ + unchecked + { + var result = (_document != null ? _document.GetHashCode() : 0); + result = (result*397) ^ (_collectionName != null ? _collectionName.GetHashCode() : 0); + result = (result*397) ^ (_id != null ? _id.GetHashCode() : 0); + result = (result*397) ^ (_metadata != null ? _metadata.GetHashCode() : 0); + return result; + } + } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString(){ + return _document.ToString(); + } + + /// + /// Deprecated. Use the new DBRef(Document) constructor instead. + /// + public static DBRef FromDocument(Document document){ + return new DBRef(document); + } + + /// + /// Determines whether [is document DB ref] [the specified document]. + /// + /// The document. + /// + /// true if [is document DB ref] [the specified document]; otherwise, false. + /// + public static bool IsDocumentDBRef(Document document){ + return document != null && document.ContainsKey(RefName) && document.ContainsKey(IdName); + } + + /// + /// Performs an explicit conversion from to . + /// + /// The db ref. + /// The result of the conversion. + public static explicit operator Document(DBRef dbRef){ + return dbRef._document; + } + + /// + /// Indicates whether the current object is equal to another object of the same type. + /// + /// An object to compare with this object. + /// + /// true if the current object is equal to the parameter; otherwise, false. + /// + public bool Equals(DBRef other) + { + if(ReferenceEquals(null, other)) + return false; + if(ReferenceEquals(this, other)) + return true; + return Equals(other._document, _document) && Equals(other._collectionName, _collectionName) && Equals(other._id, _id) && Equals(other._metadata, _metadata); + } + + /// + /// Implements the operator ==. + /// + /// The left. + /// The right. + /// The result of the operator. + public static bool operator ==(DBRef left, DBRef right) + { + return Equals(left, right); + } + + /// + /// Implements the operator !=. + /// + /// The left. + /// The right. + /// The result of the operator. 
+ public static bool operator !=(DBRef left, DBRef right) + { + return !Equals(left, right); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/DatabaseJavascript.cs b/source/MongoDB/DatabaseJavascript.cs new file mode 100644 index 00000000..f179bd24 --- /dev/null +++ b/source/MongoDB/DatabaseJavascript.cs @@ -0,0 +1,281 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; + +namespace MongoDB +{ + /// + /// Encapsulates and provides access to the serverside javascript stored in db.system.js. + /// + public class DatabaseJavascript : ICollection + { + private readonly IMongoCollection _collection; + + /// + /// Initializes a new instance of the class. + /// + /// The database. + internal DatabaseJavascript(IMongoDatabase database) + { + if(database == null) + throw new ArgumentNullException("database"); + + _collection = database["system.js"]; + + EnsureIndexExists(); + } + + /// + /// Ensures the index exists. + /// + /// + /// Needed for some versions of the db to retrieve the functions. + /// + private void EnsureIndexExists() + { + _collection.Metadata.CreateIndex(new Document("_id", 1), true); + } + + /// + /// Gets or sets the with the specified name. + /// + /// + public Document this[String name] + { + get { return GetFunction(name); } + set { Add(value); } + } + + /// + /// Stores a function in the database. + /// + /// The object to add to the . + /// + /// The is read-only. + /// + public void Add(Document item) + { + if(_collection.FindOne(new Document("_id", item.Id)) != null) + throw new ArgumentException(String.Format("Function {0} already exists in the database.", item.Id)); + + _collection.Insert(item); + } + + /// + /// Removes every function in the database. + /// + /// + /// The is read-only. + /// + public void Clear() + { + _collection.Remove(new Document()); + } + + /// + /// Determines whether the contains a specific value. + /// + /// The object to locate in the . + /// + /// true if is found in the ; otherwise, false. + /// + public bool Contains(Document item) + { + return Contains((string)item["_id"]); + } + + /// + /// Copies the elements of the to an , starting at a particular index. + /// + /// The one-dimensional that is the destination of the elements copied from . The must have zero-based indexing. + /// The zero-based index in at which copying begins. + /// + /// is null. + /// + /// + /// is less than 0. + /// + /// + /// is multidimensional. + /// -or- + /// is equal to or greater than the length of . + /// -or- + /// The number of elements in the source is greater than the available space from to the end of the destination . + /// -or- + /// Type cannot be cast automatically to the type of the destination . + /// + public void CopyTo(Document[] array, int arrayIndex) + { + using(var cursor = _collection.FindAll().Limit(array.Length - 1).Skip(arrayIndex).Sort("_id")) + { + var index = arrayIndex; + foreach(var document in cursor.Documents) + { + array[index] = document; + index++; + } + } + } + + /// + /// Removes the first occurrence of a specific object from the . + /// + /// The object to remove from the . + /// + /// true if was successfully removed from the ; otherwise, false. This method also returns false if is not found in the original . + /// + /// + /// The is read-only. + /// + public bool Remove(Document item) + { + return Remove((string)item["_id"]); + } + + /// + /// Gets the number of elements contained in the . + /// + /// + /// + /// The number of elements contained in the . 
+ /// + public int Count + { + get + { + var count = _collection.Count(); + if(count > int.MaxValue) + return int.MaxValue; //lots of functions. + return (int)count; + } + } + + /// + /// Gets a value indicating whether the is read-only. + /// + /// + /// true if the is read-only; otherwise, false. + /// + public bool IsReadOnly + { + get { return false; } + } + + /// + /// Returns an enumerator that iterates through the collection. + /// + /// + /// A that can be used to iterate through the collection. + /// + public IEnumerator GetEnumerator() + { + using(var cursor = _collection.FindAll()) + { + foreach(var document in cursor.Documents) + yield return document; + } + } + + /// + /// Returns an enumerator that iterates through a collection. + /// + /// + /// An object that can be used to iterate through the collection. + /// + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + /// + /// Gets the document representing the function in the database. + /// + /// + /// A + /// + /// + /// A + /// + public Document GetFunction(string name) + { + return _collection.FindOne(new Document().Add("_id", name)); + } + + /// + /// Returns a listing of the names of all the functions in the database + /// + public List GetFunctionNames() + { + return _collection.FindAll().Documents.Select(document => (String)document.Id).ToList(); + } + + /// + /// Adds the specified name. + /// + /// The name. + /// The func. + public void Add(string name, string func) + { + Add(name, new Code(func)); + } + + /// + /// Adds the specified name. + /// + /// The name. + /// The func. + public void Add(string name, Code func) + { + Add(new Document("_id", name).Add("value", func)); + } + + /// + /// Store a function in the database with an extended attribute called version. + /// + /// + /// Version attributes are an extension to the spec. Function names must be unique + /// to the database so only one version can be stored at a time. This is most useful for libraries + /// that store function in the database to make sure that the function they are using is the most + /// up to date. + /// + public void Add(string name, Code func, float version) + { + Add(new Document("_id", name).Add("value", func).Add("version", version)); + } + + /// + /// Checks to see if a function named name is stored in the database. + /// + /// + /// A + /// + /// + /// A + /// + public bool Contains(string name) + { + return GetFunction(name) != null; + } + + /// + /// Updates the specified item. + /// + /// The item. + public void Update(Document item) + { + throw new NotImplementedException(); + } + + /// + /// Removes the specified name. + /// + /// The name. + /// + public bool Remove(string name) + { + _collection.Remove(new Document("_id", name)); + return true; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/DatabaseMetadata.cs b/source/MongoDB/DatabaseMetadata.cs new file mode 100644 index 00000000..ad59e447 --- /dev/null +++ b/source/MongoDB/DatabaseMetadata.cs @@ -0,0 +1,144 @@ +using System; +using MongoDB.Configuration; +using MongoDB.Connections; +using MongoDB.Util; + +namespace MongoDB +{ + /// + /// Administration of metadata for a database. + /// + public class DatabaseMetadata + { + private readonly MongoConfiguration _configuration; + private readonly Connection _connection; + private readonly MongoDatabase _database; + private readonly string _name; + + /// + /// Initializes a new instance of the class. + /// + /// The configuration. + /// The name. + /// The conn. 
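A usage sketch for the DatabaseJavascript wrapper above; js is assumed to be the instance an open database exposes, and the function names are made up.

// Sketch; js is a DatabaseJavascript obtained from an open database.
js.Add("addNumbers", "function(x, y) { return x + y; }");   // stored as {_id, value} in system.js

// Versioned variant (a driver extension to the spec); Code wraps the javascript source.
js.Add("discount", new Code("function(p) { return p * 0.9; }"), 1.1f);

if (js.Contains("addNumbers"))
{
    Document stored = js["addNumbers"];     // the {_id: "addNumbers", value: ...} document
}

var names = js.GetFunctionNames();          // every function _id in system.js
js.Remove("discount");                      // delete by function name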
+ internal DatabaseMetadata(MongoConfiguration configuration, string name, Connection conn) + { + //Todo: add public constrcutor for users to call + _configuration = configuration; + _connection = conn; + _name = name; + _database = new MongoDatabase(_configuration, conn, name); + } + + /// + /// Creates the collection. + /// + /// The name. + /// + public IMongoCollection CreateCollection(string name) + { + return CreateCollection(name, null); + } + + /// + /// Creates the collection. + /// + /// The name. + /// The options. + /// + public IMongoCollection CreateCollection(string name, Document options) + { + var cmd = new Document(); + cmd.Add("create", name).Merge(options); + _database.SendCommand(cmd); + return new MongoCollection(_configuration, _connection, _name, name); + } + + /// + /// Drops the collection. + /// + /// The col. + /// + public bool DropCollection(MongoCollection collection) + { + return DropCollection(collection.Name); + } + + /// + /// Drops the collection. + /// + /// The name. + /// + public bool DropCollection(string name) + { + var result = _database.SendCommand(new Document().Add("drop", name)); + return result.ContainsKey("ok") && Convert.ToBoolean(result["ok"]); + } + + /// + /// Drops the database. + /// + /// + public bool DropDatabase() + { + var result = _database.SendCommand("dropDatabase"); + return result.ContainsKey("ok") && Convert.ToBoolean(result["ok"]); + } + + /// + /// Adds the user. + /// + /// The username. + /// The password. + public void AddUser(string username, string password) + { + var users = _database["system.users"]; + var pwd = MongoHash.Generate(username + ":mongo:" + password); + var user = new Document().Add("user", username).Add("pwd", pwd); + + if(FindUser(username) != null) + throw new MongoException("A user with the name " + username + " already exists in this database.", null); + users.Insert(user); + } + + /// + /// Removes the user. + /// + /// The username. + public void RemoveUser(string username) + { + var users = _database["system.users"]; + users.Remove(new Document().Add("user", username)); + } + + /// + /// Lists the users. + /// + /// + public ICursor ListUsers() + { + var users = _database["system.users"]; + return users.FindAll(); + } + + /// + /// Finds the user. + /// + /// The username. + /// + public Document FindUser(string username) + { + return FindUser(new Document().Add("user", username)); + } + + /// + /// Finds the user. + /// + /// The spec. + /// + public Document FindUser(Document spec) + { + return _database["system.users"].FindOne(spec); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Document.cs b/source/MongoDB/Document.cs new file mode 100644 index 00000000..c09615e0 --- /dev/null +++ b/source/MongoDB/Document.cs @@ -0,0 +1,726 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Xml; +using System.Xml.Schema; +using System.Xml.Serialization; +using MongoDB.Util; + +namespace MongoDB +{ + /// + /// Description of Document. + /// + [Serializable] + public class Document : IDictionary, IDictionary, IXmlSerializable + { + private readonly List _orderedKeys; + private readonly Dictionary _dictionary; + private readonly IComparer _keyComparer; + + /// + /// Initializes a new instance of the class. + /// + public Document(){ + _dictionary = new Dictionary(); + _orderedKeys = new List(); + } + + /// + /// Initialize a new instance of the class with an optional key sorter. 
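A sketch of the administrative helpers above; db stands for an open database whose Metadata property returns this class, and the collection options shown are ordinary server options rather than anything this diff defines.

// Sketch; db is an open database, names and options are illustrative.
var meta = db.Metadata;

// Extra options are merged into the "create" command, e.g. a capped collection.
var log = meta.CreateCollection("log", new Document("capped", true).Add("size", 100000));

meta.AddUser("reporting", "secret");            // hashes the password into system.users
Document user = meta.FindUser("reporting");
meta.RemoveUser("reporting");

bool dropped = meta.DropCollection("log");      // true when the server answers ok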
+ /// + public Document(IComparer keyComparer) + :this() + { + if(keyComparer == null) + throw new ArgumentNullException("keyComparer"); + + _keyComparer = keyComparer; + } + + /// + /// Initializes a new instance of the class and + /// add's the given values to it. + /// + /// The key. + /// The value. + public Document(string key,object value) + : this() + { + Add(key, value); + } + + /// + /// Initializes a new instance of the class. + /// + /// The dictionary. + public Document(IEnumerable> dictionary) + :this() + { + if(dictionary == null) + throw new ArgumentNullException("dictionary"); + + foreach(var entry in dictionary) + Add(entry.Key, entry.Value); + } + + /// + /// Gets or sets the with the specified key. + /// + /// + public object this[string key]{ + get { return Get(key); } + set { Set(key, value); } + } + + /// + /// Gets an containing the keys of the . + /// + /// + /// + /// An containing the keys of the object that implements . + /// + ICollection IDictionary.Keys + { + get { return _dictionary.Keys; } + } + + /// + /// Gets an containing the values in the . + /// + /// + /// + /// An containing the values in the object that implements . + /// + ICollection IDictionary.Values + { + get { return _dictionary.Values; } + } + + /// + /// Gets an containing the values in the . + /// + /// + /// + /// An containing the values in the object that implements . + /// + public ICollection Values{ + get { return _dictionary.Values; } + } + + /// + /// Gets an containing the keys of the . + /// + /// + /// + /// An containing the keys of the object that implements . + /// + public ICollection Keys{ + get { return _orderedKeys.AsReadOnly(); } + } + + /// + /// Gets or sets the mongo _id field. + /// + /// The id. + public object Id + { + get { return this["_id"]; } + set { this["_id"] = value; } + } + + /// + /// Gets the value of the specified key. + /// + /// The key. + /// + public object Get(string key) + { + object item; + return _dictionary.TryGetValue(key, out item) ? item : null; + } + + /// + /// Gets the typed value of the specified key. + /// + /// + /// The key. + /// + public T Get(string key){ + var value = Get(key); + if (value == null) + return default(T); + return (T)Convert.ChangeType(value, typeof(T)); + } + + /// + /// Gets the value associated with the specified key. + /// + /// The key whose value to get. + /// When this method returns, the value associated with the specified key, if the key is found; otherwise, the default value for the type of the parameter. This parameter is passed uninitialized. + /// + /// true if the object that implements contains an element with the specified key; otherwise, false. + /// + /// + /// is null. + /// + public bool TryGetValue(string key, out object value){ + return _dictionary.TryGetValue(key, out value); + } + + /// + /// Determines whether the contains an element with the specified key. + /// + /// The key to locate in the . + /// + /// true if the contains an element with the key; otherwise, false. + /// + /// + /// is null. + /// + public bool ContainsKey(string key){ + return _dictionary.ContainsKey(key); + } + + /// + /// Adds an element with the provided key and value to the . + /// + /// The object to use as the key of the element to add. + /// The object to use as the value of the element to add. + /// + /// is null. + /// + /// + /// An element with the same key already exists in the . + /// + /// + /// The is read-only. 
+ /// + public Document Add(string key, object value) + { + _dictionary.Add(key, value); + _orderedKeys.Add(key);//Relies on ArgumentException from above if key already exists. + EnsureKeyOrdering(); + return this; + } + + /// + /// Adds an element with the provided key and value to the . + /// + /// The object to use as the key of the element to add. + /// The object to use as the value of the element to add. + /// + /// is null. + /// + /// + /// An element with the same key already exists in the . + /// + /// + /// The is read-only. + /// + void IDictionary.Add(string key, object value){ + Add(key,value); + } + + /// + /// Appends the specified key. + /// + /// The key. + /// The value. + /// + [Obsolete("Use Add instead. This method is about to be removed in a future version.")] + public Document Append(string key, object value){ + return Add(key, value); + } + + /// + /// Sets the value of the specified key. + /// + /// The key. + /// The value. + /// + public Document Set(string key, object value){ + if(key == null) + throw new ArgumentNullException("key"); + + if(!_orderedKeys.Contains(key)) + _orderedKeys.Add(key); + + _dictionary[key] = value; + + EnsureKeyOrdering(); + + return this; + } + + /// + /// Adds an item to the Document at the specified position + /// + /// The key. + /// The value. + /// The position. + public void Insert(string key, object value, int position){ + _dictionary.Add(key, value);//Relies on ArgumentException from above if key already exists. + _orderedKeys.Insert(position, key); + EnsureKeyOrdering(); + } + + /// + /// Prepends the specified key. + /// + /// The key. + /// The value. + /// This document + public Document Prepend(string key, object value){ + Insert(key, value, 0); + return this; + } + + /// + /// Merges the source document into this. + /// + /// The source. + /// This document + public Document Merge(Document source) + { + if(source == null) + return this; + + foreach(var key in source.Keys) + this[key] = source[key]; + + return this; + } + + /// + /// Removes the specified key. + /// + /// The key. + /// + /// true if the element is successfully removed; otherwise, false. This method also returns false if was not found in the original . + /// + /// + /// is null. + /// + /// + /// The is read-only. + /// + public bool Remove(string key){ + _orderedKeys.Remove(key); + return _dictionary.Remove(key); + } + + /// + /// Adds an item to the . + /// + /// The object to add to the . + /// + /// The is read-only. + /// + void ICollection>.Add(KeyValuePair item){ + Add(item.Key, item.Value); + } + + /// + /// Determines whether the object contains an element with the specified key. + /// + /// The key to locate in the object. + /// + /// true if the contains an element with the key; otherwise, false. + /// + /// + /// is null. + /// + bool IDictionary.Contains(object key) + { + return _orderedKeys.Contains(Convert.ToString(key)); + } + + /// + /// Adds an element with the provided key and value to the object. + /// + /// The to use as the key of the element to add. + /// The to use as the value of the element to add. + /// + /// is null. + /// + /// + /// An element with the same key already exists in the object. + /// + /// + /// The is read-only. + /// -or- + /// The has a fixed size. + /// + void IDictionary.Add(object key, object value) + { + Add(Convert.ToString(key), value); + } + + /// + /// Clears the contents of the instance. + /// + /// + /// The is read-only. 
+ /// + public void Clear(){ + _dictionary.Clear(); + _orderedKeys.Clear(); + } + + /// + /// Returns an object for the object. + /// + /// + /// An object for the object. + /// + IDictionaryEnumerator IDictionary.GetEnumerator() + { + return ( (IDictionary)_dictionary ).GetEnumerator(); + } + + /// + /// Removes the element with the specified key from the object. + /// + /// The key of the element to remove. + /// + /// is null. + /// + /// + /// The object is read-only. + /// -or- + /// The has a fixed size. + /// + void IDictionary.Remove(object key) + { + Remove(Convert.ToString(key)); + } + + /// + /// Gets or sets the with the specified key. + /// + /// + object IDictionary.this[object key] + { + get { return Get(Convert.ToString(key)); } + set { Set(Convert.ToString(key), value); } + } + + /// + /// Determines whether the contains a specific value. + /// + /// The object to locate in the . + /// + /// true if is found in the ; otherwise, false. + /// + bool ICollection>.Contains(KeyValuePair item){ + return ((IDictionary)_dictionary).Contains(item); + } + + /// + /// Copies the elements of the to an , starting at a particular index. + /// + /// The one-dimensional that is the destination of the elements copied from . The must have zero-based indexing. + /// The zero-based index in at which copying begins. + void ICollection>.CopyTo(KeyValuePair[] array, int arrayIndex){ + ((ICollection>)_dictionary).CopyTo(array,arrayIndex); + } + + /// + /// Copies to items to destinationDocument. + /// + /// The destination document. + public void CopyTo(Document destinationDocument){ + if(destinationDocument == null) + throw new ArgumentNullException("destinationDocument"); + + //Todo: Fix any accidental reordering issues. + + foreach(var key in _orderedKeys){ + if(destinationDocument.ContainsKey(key)) + destinationDocument.Remove(key); + destinationDocument[key] = this[key]; + } + } + /// + /// Removes the first occurrence of a specific object from the . + /// + /// The object to remove from the . + /// + /// true if was successfully removed from the ; otherwise, false. This method also returns false if is not found in the original . + /// + /// + /// The is read-only. + /// + public bool Remove(KeyValuePair item){ + var removed = ((ICollection>)_dictionary).Remove(item); + if(removed) + _orderedKeys.Remove(item.Key); + return removed; + } + + /// + /// Copies the elements of the to an , starting at a particular index. + /// + /// The one-dimensional that is the destination of the elements copied from . The must have zero-based indexing. + /// The zero-based index in at which copying begins. + /// + /// is null. + /// + /// + /// is less than zero. + /// + /// + /// is multidimensional. + /// -or- + /// is equal to or greater than the length of . + /// -or- + /// The number of elements in the source is greater than the available space from to the end of the destination . + /// + /// + /// The type of the source cannot be cast automatically to the type of the destination . + /// + void ICollection.CopyTo(Array array, int index) + { + ((ICollection)_dictionary).CopyTo(array,index); + } + + /// + /// Gets the number of elements contained in the . + /// + /// + /// + /// The number of elements contained in the . + /// + public int Count{ + get { return _dictionary.Count; } + } + + /// + /// Gets an object that can be used to synchronize access to the . + /// + /// + /// + /// An object that can be used to synchronize access to the . 
+ /// + object ICollection.SyncRoot + { + get { return _orderedKeys; /* no special object is need since _orderedKeys is internal.*/ } + } + + /// + /// Gets a value indicating whether access to the is synchronized (thread safe). + /// + /// + /// true if access to the is synchronized (thread safe); otherwise, false. + /// + bool ICollection.IsSynchronized + { + get { return false; } + } + + /// + /// Gets a value indicating whether the is read-only. + /// + /// + /// true if the is read-only; otherwise, false. + /// + public bool IsReadOnly{ + get { return false; } + } + + /// + /// Gets a value indicating whether the object has a fixed size. + /// + /// + /// true if the object has a fixed size; otherwise, false. + /// + bool IDictionary.IsFixedSize + { + get { return false; } + } + + /// + /// Determines whether the specified is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + /// + /// The parameter is null. + /// + public override bool Equals(object obj){ + if(obj is Document) + return Equals(obj as Document); + return base.Equals(obj); + } + + /// + /// Equalses the specified obj. + /// + /// The obj. + /// + public bool Equals(Document document){ + if(document == null) + return false; + if(_orderedKeys.Count != document._orderedKeys.Count) + return false; + return GetHashCode() == document.GetHashCode(); + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode(){ + var hash = 27; + foreach(var key in _orderedKeys){ + var valueHashCode = GetValueHashCode(this[key]); + unchecked{ + hash = (13*hash) + key.GetHashCode(); + hash = (13*hash) + valueHashCode; + } + } + return hash; + } + + /// + /// Gets the value hash code. + /// + /// The value. + /// + private int GetValueHashCode(object value){ + if(value == null) + return 0; + return (value is Array) ? GetArrayHashcode((Array)value) : value.GetHashCode(); + } + + /// + /// Gets the array hashcode. + /// + /// The array. + /// + private int GetArrayHashcode(Array array){ + var hash = 0; + foreach(var value in array){ + var valueHashCode = GetValueHashCode(value); + unchecked{ + hash = (13*hash) + valueHashCode; + } + } + return hash; + } + + /// + /// Returns an enumerator that iterates through a collection. + /// + /// + /// An object that can be used to iterate through the collection. + /// + IEnumerator IEnumerable.GetEnumerator(){ + return GetEnumerator(); + } + + /// + /// Returns an enumerator that iterates through the collection. + /// + /// + /// A that can be used to iterate through the collection. + /// + public IEnumerator> GetEnumerator(){ + return _orderedKeys.Select(orderedKey => new KeyValuePair(orderedKey, _dictionary[orderedKey])).GetEnumerator(); + } + + /// + /// Toes the dictionary. + /// + /// + public Dictionary ToDictionary(){ + return new Dictionary(this); + } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString(){ + return JsonFormatter.Serialize(this); + } + + /// + /// Ensures the key ordering. + /// + private void EnsureKeyOrdering(){ + if(_keyComparer==null) + return; + + _orderedKeys.Sort(_keyComparer); + } + + /// + /// This method is reserved and should not be used. 
When implementing the IXmlSerializable interface, you should return null (Nothing in Visual Basic) from this method, and instead, if specifying a custom schema is required, apply the to the class. + /// + /// + /// An that describes the XML representation of the object that is produced by the method and consumed by the method. + /// + XmlSchema IXmlSerializable.GetSchema() + { + return null; + } + + /// + /// Generates an object from its XML representation. + /// + /// The stream from which the object is deserialized. + void IXmlSerializable.ReadXml(XmlReader reader) + { + reader.ReadStartElement(); + + while(reader.IsStartElement()) + { + var key = reader.Name; + object value = null; + + if(reader.MoveToAttribute("type")) + { + var type = Type.GetType(reader.Value); + + reader.ReadStartElement(); + + var serializer = new XmlSerializer(type); + value = serializer.Deserialize(reader); + } + else + reader.Read(); + + Add(key, value); + } + } + + /// + /// Converts an object into its XML representation. + /// + /// The stream to which the object is serialized. + void IXmlSerializable.WriteXml(XmlWriter writer) + { + foreach(var pair in this) + { + writer.WriteStartElement(pair.Key); + + if(pair.Value == null) + continue; + + var type = pair.Value.GetType(); + writer.WriteAttributeString("type", type.AssemblyQualifiedName); + var serializer = new XmlSerializer(type); + serializer.Serialize(writer,pair.Value); + } + } + } +} diff --git a/source/MongoDB/Exceptions/IdGenerationException.cs b/source/MongoDB/Exceptions/IdGenerationException.cs new file mode 100644 index 00000000..5c0d424f --- /dev/null +++ b/source/MongoDB/Exceptions/IdGenerationException.cs @@ -0,0 +1,33 @@ +using System; +using System.Runtime.Serialization; + +namespace MongoDB +{ + /// + /// + /// + [Serializable] + public class IdGenerationException : MongoException + { + /// + /// Initializes a new instance of the class. + /// + /// The message. + public IdGenerationException(string message) : base(message) { } + + /// + /// Initializes a new instance of the class. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. + /// + /// The parameter is null. + /// + /// + /// The class name is null or is zero (0). + /// + public IdGenerationException(SerializationInfo info, StreamingContext context) : base(info,context) + { + } + } +} diff --git a/source/MongoDB/Exceptions/InvalidQueryException.cs b/source/MongoDB/Exceptions/InvalidQueryException.cs new file mode 100644 index 00000000..74100cf3 --- /dev/null +++ b/source/MongoDB/Exceptions/InvalidQueryException.cs @@ -0,0 +1,59 @@ +using System; +using System.Runtime.Serialization; + +namespace MongoDB +{ + /// + /// + /// + [Serializable] + public class InvalidQueryException : Exception + { + // + // For guidelines regarding the creation of new exception types, see + // http://msdn.microsoft.com/library/default.asp?url=/library/en-us/cpgenref/html/cpconerrorraisinghandlingguidelines.asp + // and + // http://msdn.microsoft.com/library/default.asp?url=/library/en-us/dncscol/html/csharp07192001.asp + // + + /// + /// Initializes a new instance of the class. + /// + public InvalidQueryException(){ + } + + /// + /// Initializes a new instance of the class. + /// + /// The message. + public InvalidQueryException(string message) + : base(message){ + } + + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The inner. 
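A short sketch of working with the ordered Document type defined above; all keys and values are illustrative.

// Sketch; keys keep their insertion order unless a key comparer is supplied
// through the alternate constructor.
var doc = new Document("title", "MongoDB in C#")       // key/value constructor
    .Add("pages", 250)
    .Add("tags", new[] { "db", "nosql" });

doc.Prepend("_id", 42);                 // inserted at position 0
doc["pages"] = 300;                     // indexer setter routes through Set()

int pages = doc.Get<int>("pages");      // typed getter via Convert.ChangeType
var copy = new Document().Merge(doc);   // copies the source keys into the new document

// Enumeration yields KeyValuePair<string, object> entries in key order, which matters
// for commands and index specifications.
foreach (var pair in doc) { /* pair.Key, pair.Value */ }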
+ public InvalidQueryException(string message, Exception inner) + : base(message, inner){ + } + + /// + /// Initializes a new instance of the class. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. + /// + /// The parameter is null. + /// + /// + /// The class name is null or is zero (0). + /// + protected InvalidQueryException( + SerializationInfo info, + StreamingContext context) + : base(info, context){ + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Exceptions/MongoCommandException.cs b/source/MongoDB/Exceptions/MongoCommandException.cs new file mode 100644 index 00000000..3d314376 --- /dev/null +++ b/source/MongoDB/Exceptions/MongoCommandException.cs @@ -0,0 +1,66 @@ +using System; +using System.Runtime.Serialization; + +namespace MongoDB +{ + /// + /// Raised when a command returns a failure message. + /// + [Serializable] + public class MongoCommandException : MongoException + { + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The error. + /// The command. + public MongoCommandException(string message, Document error, Document command) + : base(message, null) + { + Error = error; + Command = command; + } + + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The error. + /// The command. + /// The e. + public MongoCommandException(string message, Document error, Document command, Exception e) + : base(message, e) + { + Error = error; + Command = command; + } + + /// + /// Initializes a new instance of the class. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. + /// + /// The parameter is null. + /// + /// + /// The class name is null or is zero (0). + /// + public MongoCommandException(SerializationInfo info, StreamingContext context) : base(info,context) + { + } + + /// + /// Gets or sets the error. + /// + /// The error. + public Document Error { get; private set; } + + /// + /// Gets or sets the command. + /// + /// The command. + public Document Command { get; private set; } + } +} \ No newline at end of file diff --git a/source/MongoDB/Exceptions/MongoConnectionException.cs b/source/MongoDB/Exceptions/MongoConnectionException.cs new file mode 100644 index 00000000..512b2f2f --- /dev/null +++ b/source/MongoDB/Exceptions/MongoConnectionException.cs @@ -0,0 +1,87 @@ +using System; +using System.Runtime.Serialization; +using MongoDB.Connections; + +namespace MongoDB +{ + /// + /// + /// + [Serializable] + public class MongoConnectionException : MongoException + { + /// + /// Gets or sets the connection string. + /// + /// The connection string. + public string ConnectionString { get; private set; } + + /// + /// Gets or sets the end point. + /// + /// The end point. + public MongoServerEndPoint EndPoint { get; private set; } + + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The connection string. + /// The end point. + public MongoConnectionException(string message, string connectionString, MongoServerEndPoint endPoint) + : this(message,connectionString,endPoint,null) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The connection string. + /// The end point. + /// The inner exception. 
+ public MongoConnectionException(string message, string connectionString, MongoServerEndPoint endPoint, Exception innerException) + : base(message, innerException){ + EndPoint = endPoint; + ConnectionString = connectionString; + } + + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The connection. + internal MongoConnectionException(string message, Connection connection) + :this(message,connection,null){} + + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The connection. + /// The inner exception. + internal MongoConnectionException(string message, Connection connection, Exception innerException) + :base(message,innerException){ + if(connection == null) + throw new ArgumentNullException("connection"); + ConnectionString = connection.ConnectionString; + EndPoint = connection.EndPoint; + } + + /// + /// Initializes a new instance of the class. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. + /// + /// The parameter is null. + /// + /// + /// The class name is null or is zero (0). + /// + public MongoConnectionException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Exceptions/MongoDuplicateKeyException.cs b/source/MongoDB/Exceptions/MongoDuplicateKeyException.cs new file mode 100644 index 00000000..bf85f98d --- /dev/null +++ b/source/MongoDB/Exceptions/MongoDuplicateKeyException.cs @@ -0,0 +1,43 @@ +using System; +using System.Runtime.Serialization; + +namespace MongoDB +{ + /// + /// Raised when an action causes a unique constraint violation in an index. + /// + [Serializable] + public class MongoDuplicateKeyException : MongoOperationException + { + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The error. + public MongoDuplicateKeyException(string message, Document error):base(message, error,null){} + + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The error. + /// The e. + public MongoDuplicateKeyException(string message, Document error, Exception e):base(message, error,e){} + + /// + /// Initializes a new instance of the class. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. + /// + /// The parameter is null. + /// + /// + /// The class name is null or is zero (0). + /// + public MongoDuplicateKeyException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} \ No newline at end of file diff --git a/MongoDBDriver/Exceptions/MongoDuplicateKeyUpdateException.cs b/source/MongoDB/Exceptions/MongoDuplicateKeyUpdateException.cs similarity index 50% rename from MongoDBDriver/Exceptions/MongoDuplicateKeyUpdateException.cs rename to source/MongoDB/Exceptions/MongoDuplicateKeyUpdateException.cs index 32608527..95e89435 100644 --- a/MongoDBDriver/Exceptions/MongoDuplicateKeyUpdateException.cs +++ b/source/MongoDB/Exceptions/MongoDuplicateKeyUpdateException.cs @@ -1,6 +1,7 @@ using System; +using System.Runtime.Serialization; -namespace MongoDB.Driver +namespace MongoDB { /// /// Raised when an update action causes a unique constraint violation in an index. @@ -8,6 +9,7 @@ namespace MongoDB.Driver /// /// It is only another class because Mongo makes a distinction and it may be helpful. 
/// + [Serializable] public class MongoDuplicateKeyUpdateException : MongoDuplicateKeyException { /// @@ -25,5 +27,21 @@ public MongoDuplicateKeyUpdateException(string message, Document error) /// The error. /// The e. public MongoDuplicateKeyUpdateException(string message, Document error, Exception e):base(message, error,e){} + + /// + /// Initializes a new instance of the class. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. + /// + /// The parameter is null. + /// + /// + /// The class name is null or is zero (0). + /// + public MongoDuplicateKeyUpdateException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } } } \ No newline at end of file diff --git a/source/MongoDB/Exceptions/MongoException.cs b/source/MongoDB/Exceptions/MongoException.cs new file mode 100644 index 00000000..0c2d6b41 --- /dev/null +++ b/source/MongoDB/Exceptions/MongoException.cs @@ -0,0 +1,40 @@ +using System; +using System.Runtime.Serialization; + +namespace MongoDB +{ + /// + /// Base class for all Mongo Exceptions + /// + [Serializable] + public class MongoException : Exception + { + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The inner. + public MongoException(string message, Exception innerException):base(message,innerException){} + + /// + /// Initializes a new instance of the class. + /// + /// The message. + public MongoException(string message):base(message){} + + /// + /// Initializes a new instance of the class. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. + /// + /// The parameter is null. + /// + /// + /// The class name is null or is zero (0). + /// + public MongoException(SerializationInfo info, StreamingContext context) : base(info,context) + { + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Exceptions/MongoMapReduceException.cs b/source/MongoDB/Exceptions/MongoMapReduceException.cs new file mode 100644 index 00000000..f34486eb --- /dev/null +++ b/source/MongoDB/Exceptions/MongoMapReduceException.cs @@ -0,0 +1,44 @@ +using System; +using System.Runtime.Serialization; +using MongoDB.Results; + +namespace MongoDB +{ + /// + /// Raised when a map reduce call fails. + /// + [Serializable] + public class MongoMapReduceException : MongoCommandException + { + /// + /// Gets or sets the map reduce result. + /// + /// The map reduce result. + public MapReduceResult MapReduceResult { get; private set; } + + /// + /// Initializes a new instance of the class. + /// + /// The exception. + public MongoMapReduceException(MongoCommandException exception) + :base(exception.Message,exception.Error, exception.Command) { + MapReduceResult = new MapReduceResult(exception.Error); + } + + /// + /// Initializes a new instance of the class. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. + /// + /// The parameter is null. + /// + /// + /// The class name is null or is zero (0). 
+ /// + public MongoMapReduceException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} diff --git a/source/MongoDB/Exceptions/MongoOperationException.cs b/source/MongoDB/Exceptions/MongoOperationException.cs new file mode 100644 index 00000000..18f1ec1d --- /dev/null +++ b/source/MongoDB/Exceptions/MongoOperationException.cs @@ -0,0 +1,51 @@ +using System; +using System.Runtime.Serialization; + +namespace MongoDB +{ + /// + /// + /// + [Serializable] + public class MongoOperationException : MongoException + { + /// + /// Gets or sets the error. + /// + /// The error. + public Document Error { get; private set; } + + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The error. + public MongoOperationException(string message, Document error):this(message, error,null){} + + /// + /// Initializes a new instance of the class. + /// + /// The message. + /// The error. + /// The e. + public MongoOperationException(string message, Document error, Exception e):base(message,e){ + this.Error = error; + } + + /// + /// Initializes a new instance of the class. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. + /// + /// The parameter is null. + /// + /// + /// The class name is null or is zero (0). + /// + public MongoOperationException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Exceptions/UnmappedMemberException.cs b/source/MongoDB/Exceptions/UnmappedMemberException.cs new file mode 100644 index 00000000..155b194e --- /dev/null +++ b/source/MongoDB/Exceptions/UnmappedMemberException.cs @@ -0,0 +1,34 @@ +using System; +using System.Runtime.Serialization; + +namespace MongoDB +{ + /// + /// + /// + [Serializable] + public class UnmappedMemberException : MongoException + { + /// + /// Initializes a new instance of the class. + /// + /// The message. + public UnmappedMemberException(string message) : base(message) { } + + /// + /// Initializes a new instance of the class. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. + /// + /// The parameter is null. + /// + /// + /// The class name is null or is zero (0). + /// + public UnmappedMemberException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} diff --git a/source/MongoDB/ICursor_1.cs b/source/MongoDB/ICursor_1.cs new file mode 100644 index 00000000..4ed73684 --- /dev/null +++ b/source/MongoDB/ICursor_1.cs @@ -0,0 +1,118 @@ +using System; +using System.Collections.Generic; + +namespace MongoDB{ + /// + /// + /// + /// + public interface ICursor : IDisposable + { + /// + /// Gets the id. + /// + /// The id. + long Id { get; } + + /// + /// Specs the specified spec. + /// + /// The spec. + /// + ICursor Spec(object spec); + + /// + /// Limits the specified limit. + /// + /// The limit. + /// + ICursor Limit(int limit); + + /// + /// Skips the specified skip. + /// + /// The skip. + /// + ICursor Skip(int skip); + + /// + /// Fieldses the specified fields. + /// + /// The fields. + /// + ICursor Fields(object fields); + + /// + /// Optionses the specified options. + /// + /// The options. + /// + ICursor Options(QueryOptions options); + + /// + /// Sorts the specified field. + /// + /// The field. 
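To illustrate how the exception types above typically surface: with safemode enabled the driver checks the server for errors after a write and raises typed exceptions. A hedged sketch; users is a hypothetical collection handle.

// Sketch; users is a hypothetical collection, the second Insert argument enables safemode.
try
{
    users.Insert(new Document("_id", "jsmith").Add("name", "Jane"), true);
    users.Insert(new Document("_id", "jsmith").Add("name", "Duplicate"), true);   // same _id
}
catch (MongoDuplicateKeyException ex)
{
    Document serverError = ex.Error;        // raw error document from the server
}
catch (MongoCommandException ex)
{
    Document failedCommand = ex.Command;    // the command that produced the failure
}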
+ /// + ICursor Sort(string field); + + /// + /// Sorts the specified field. + /// + /// The field. + /// The order. + /// + ICursor Sort(string field, IndexOrder order); + + /// + /// Sorts the specified fields. + /// + /// The fields. + /// + ICursor Sort(object fields); + + /// + /// Hints the specified index. + /// + /// The index. + /// + ICursor Hint(object index); + + /// + /// Keeps the cursor open. + /// + /// if set to true [value]. + /// + /// + /// By default cursors are closed automaticly after documents + /// are Enumerated. + /// + ICursor KeepCursor(bool value); + + /// + /// Snapshots this instance. + /// + /// + ICursor Snapshot(); + + /// + /// Explains this instance. + /// + /// + Document Explain(); + + /// + /// Gets a value indicating whether this instance is modifiable. + /// + /// + /// true if this instance is modifiable; otherwise, false. + /// + bool IsModifiable { get; } + + /// + /// Gets the documents. + /// + /// The documents. + IEnumerable Documents { get; } + } +} diff --git a/source/MongoDB/IMongo.cs b/source/MongoDB/IMongo.cs new file mode 100644 index 00000000..6dd3c8b8 --- /dev/null +++ b/source/MongoDB/IMongo.cs @@ -0,0 +1,46 @@ +using System; +namespace MongoDB +{ + /// + /// + /// + public interface IMongo : IDisposable + { + /// + /// Gets the connection string. + /// + /// The connection string. + string ConnectionString { get; } + + /// + /// Gets the named database. + /// + /// The name. + /// + IMongoDatabase GetDatabase(string name); + + /// + /// Gets the with the specified name. + /// + /// + IMongoDatabase this[string name] { get; } + + /// + /// Connects this instance. + /// + /// + void Connect(); + + /// + /// Tries to connect to server. + /// + /// + bool TryConnect(); + + /// + /// Disconnects this instance. + /// + /// + bool Disconnect(); + } +} diff --git a/source/MongoDB/IMongoCollection_1.cs b/source/MongoDB/IMongoCollection_1.cs new file mode 100644 index 00000000..e86af48b --- /dev/null +++ b/source/MongoDB/IMongoCollection_1.cs @@ -0,0 +1,357 @@ +using System; +using System.Collections.Generic; + +namespace MongoDB +{ + /// + /// A collection is a storage unit for a group of s. The documents do not all have to + /// contain the same schema but for efficiency they should all be similar. + /// + /// + /// Safemode checks the database for any errors that may have occurred during + /// the insert such as a duplicate key constraint violation. + /// + public interface IMongoCollection + where T : class + { + /// + /// Gets the database. + /// + /// The database. + IMongoDatabase Database { get; } + + /// + /// Name of the collection. + /// + string Name { get; } + + /// + /// String value of the database name. + /// + string DatabaseName { get; } + + /// + /// Full name of the collection databasename . collectionname + /// + string FullName { get; } + + /// + /// Metadata about the collection such as indexes. + /// + CollectionMetadata Metadata { get; } + + /// + /// Finds and returns the first document in a selector query. + /// + /// The where. + /// + /// A from the collection. + /// + T FindOne(string javascriptWhere); + + /// + /// Finds and returns the first document in a selector query. + /// + /// The selector. + /// + /// A from the collection. + /// + T FindOne(object selector); + + /// + /// Returns a cursor that contains all of the documents in the collection. + /// + /// + /// Cursors load documents from the database in batches instead of all at once. 
+ /// + ICursor FindAll(); + + /// + /// Uses the $where operator to query the collection. The value of the where is Javascript that will + /// produce a true for the documents that match the criteria. + /// + /// Javascript + /// + ICursor Find(string javascriptWhere); + + /// + /// Queries the collection using the query selector. + /// + /// The selector. + /// A + ICursor Find(object selector); + + /// + /// Queries the collection using the specification and only returns a subset of fields. + /// + /// The selector. + /// The fields. + /// A + ICursor Find(object selector, object fields); + + /// + /// Deprecated. Use the fluent interface on the cursor to specify a limit and skip value. + /// + /// The selector. + /// The limit. + /// The skip. + /// + [Obsolete("Use the fluent interface on ICursor for specifying limit and skip Find.Skip(x).Limit(y)")] + ICursor Find(object selector, int limit, int skip); + + /// + /// Queries the collection using the specification and only returns a subset of fields + /// + /// The selector. + /// The limit. + /// The skip. + /// The fields. + /// + [Obsolete("Use the fluent interface on ICursor for specifying limit and skip Find.Skip(x).Limit(y)")] + ICursor Find(object selector, int limit, int skip, object fields); + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// The selector. + /// A + T FindAndModify(object document, object selector); + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// The selector. + /// containing the names of columns to sort on with the values being the + /// A + /// + T FindAndModify(object document, object selector, object sort); + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// The selector. + /// if set to true [return new]. + /// A + T FindAndModify(object document, object selector, bool returnNew); + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// The selector. + /// containing the names of columns to sort on with the values being the + /// + /// The fields. + /// if set to true [remove]. + /// if set to true [return new]. + /// if set to true [upsert]. + /// A + T FindAndModify(object document, object selector, object sort, object fields, bool remove, bool returnNew, bool upsert); + + /// + /// Entrypoint into executing a map/reduce query against the collection. + /// + /// + MapReduce MapReduce(); + + /// + /// Count all items in the collection. + /// + long Count(); + + /// + /// Count all items in a collection that match the query selector. + /// + /// The selector. + /// + /// + /// It will return 0 if the collection doesn't exist yet. + /// + long Count(object selector); + + /// + /// Inserts the Document into the collection. + /// + void Insert(object document, bool safemode); + + /// + /// Inserts the specified doc. + /// + /// The doc. + void Insert(object document); + + /// + /// Bulk inserts the specified documents into the database. 
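A sketch of the fluent cursor returned by Find and FindAll; posts is a hypothetical collection handle and the field names are made up.

// Sketch; posts is a hypothetical collection, field names are illustrative.
using (var cursor = posts.Find(new Document("published", true))
                         .Sort("created", IndexOrder.Descending)
                         .Skip(20)
                         .Limit(10)
                         .Fields(new Document("title", 1).Add("created", 1)))
{
    foreach (var doc in cursor.Documents)
    {
        // documents are pulled from the server lazily, in batches, while enumerating
    }
}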
+ /// + /// + /// See the safemode description in the class description + /// + void Insert(IEnumerable documents, bool safemode); + + /// + /// Bulk inserts the specified documents into the database. + /// + /// The documents. + void Insert(IEnumerable documents); + + /// + /// Deletes documents from the collection according to the selector. + /// + /// The selector. + /// if set to true [safemode]. + /// + /// An empty document will match all documents in the collection and effectively truncate it. + /// See the safemode description in the class description + /// + [Obsolete("Use Remove instead")] + void Delete(object selector, bool safemode); + + /// + /// Remove documents from the collection according to the selector. + /// + /// The selector. + /// if set to true [safemode]. + /// + /// An empty document will match all documents in the collection and effectively truncate it. + /// See the safemode description in the class description + /// + void Remove(object selector, bool safemode); + + /// + /// Deletes documents from the collection according to the selector. + /// + /// The selector. + /// + /// An empty document will match all documents in the collection and effectively truncate it. + /// + [Obsolete("Use Remove instead")] + void Delete(object selector); + + /// + /// Remove documents from the collection according to the selector. + /// + /// The selector. + /// + /// An empty document will match all documents in the collection and effectively truncate it. + /// + void Remove(object selector); + + /// + /// Inserts or updates a document in the database. If the document does not contain an _id one will be + /// generated and an upsert sent. Otherwise the document matching the _id of the document will be updated. + /// + /// The document. + /// if set to true [safemode]. + /// + /// See the safemode description in the class description + /// + [Obsolete("Use Save instead")] + void Update(object document, bool safemode); + + /// + /// Inserts or updates a document in the database. If the document does not contain an _id one will be + /// generated and an upsert sent. Otherwise the document matching the _id of the document will be updated. + /// + /// The document. + [Obsolete("Use Save instead")] + void Update(object document); + + /// + /// Updates the specified document with the current document. In order to only do a partial update use a + /// document containing modifier operations ($set, $unset, $inc, etc.) + /// + /// The document. + /// The selector. + /// if set to true [safemode]. + /// + /// See the safemode description in the class description + /// + void Update(object document, object selector, bool safemode); + + /// + /// Updates the specified document with the current document. In order to only do a partial update use a + /// document containing modifier operations ($set, $unset, $inc, etc.) + /// + /// The document. + /// The selector. + void Update(object document, object selector); + + /// + /// Updates the specified document with the current document. In order to only do a partial update use a + /// document containing modifier operations ($set, $unset, $inc, etc.) + /// + /// The document. + /// The selector. + /// The flags. + /// if set to true [safemode]. + /// + /// See the safemode description in the class description + /// + void Update(object document, object selector, UpdateFlags flags, bool safemode); + + /// + /// Updates the specified document with the current document. 
In order to only do a partial update use a + /// document containing modifier operations ($set, $unset, $inc, etc.) + /// + /// The to update with + /// The query selector to find the document to update. + /// + void Update(object document, object selector, UpdateFlags flags); + + /// + /// Runs a multiple update query against the database. It will wrap any + /// doc with $set if the passed in doc doesn't contain any '$' modifier ops. + /// + /// The document. + /// The selector. + void UpdateAll(object document, object selector); + + /// + /// Runs a multiple update query against the database. It will wrap any + /// doc with $set if the passed in doc doesn't contain any '$' modifier ops. + /// + /// The document. + /// The selector. + /// if set to true [safemode]. + /// + /// See the safemode description in the class description + /// + void UpdateAll(object document, object selector, bool safemode); + + /// + /// Inserts or updates a document in the database. If the document does not contain an _id one will be + /// generated and an upsert sent. Otherwise the document matching the _id of the document will be updated. + /// + /// The document. + /// + /// The document will contain the _id that is saved to the database. + /// + void Save(object document); + + /// + /// Saves a document to the database using an upsert. + /// + /// The document. + /// if set to true [safemode]. + void Save(object document, bool safemode); + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// to find the document. + /// containing the names of columns to sort on with the values being the + /// + /// if set to true [return new]. + /// A + T FindAndModify(object document, object spec, object sort, bool returnNew); + } +} \ No newline at end of file diff --git a/source/MongoDB/IMongoDatabase.cs b/source/MongoDB/IMongoDatabase.cs new file mode 100644 index 00000000..5001fa54 --- /dev/null +++ b/source/MongoDB/IMongoDatabase.cs @@ -0,0 +1,157 @@ +using System; +using System.Collections.Generic; + +namespace MongoDB +{ + /// + /// + /// + public interface IMongoDatabase + { + /// + /// Gets or sets the name. + /// + /// The name. + string Name { get; } + + /// + /// Gets the meta data. + /// + /// The meta data. + DatabaseMetadata Metadata { get; } + + /// + /// Gets the javascript. + /// + /// The javascript. + DatabaseJavascript Javascript { get; } + + /// + /// Gets the with the specified name. + /// + /// + IMongoCollection this[string name] { get; } + + /// + /// Gets the collection names. + /// + /// + List GetCollectionNames(); + + /// + /// Gets the collection. + /// + /// The name. + /// + IMongoCollection GetCollection(string name); + + /// + /// Gets the collection. + /// + /// + /// The name. + /// + IMongoCollection GetCollection(string name) where T : class; + + /// + /// Gets the collection. + /// + /// + /// + IMongoCollection GetCollection() where T : class; + + /// + /// Gets the document that a reference is pointing to. + /// + /// The reference. + /// + Document FollowReference(DBRef reference); + + /// + /// Follows the reference. + /// + /// + /// The reference. + /// + T FollowReference(DBRef reference) where T:class; + + /// + /// Most operations do not have a return code in order to save the client from having to wait for results. + /// GetLastError can be called to retrieve the return code if clients want one. 
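A sketch of everyday collection operations from the interface above; posts is a hypothetical handle and the modifier documents use the ordinary $set/$inc server operators.

// Sketch; posts is a hypothetical collection handle.
posts.Save(new Document("_id", 1).Add("title", "Draft"));       // insert or upsert by _id

// Partial update: only the fields inside $set are touched.
posts.Update(new Document("$set", new Document("title", "Final")),
             new Document("_id", 1));

// Multi-document update; a plain document is wrapped in $set by the driver.
posts.UpdateAll(new Document("reviewed", false), new Document("published", false));

long published = posts.Count(new Document("published", true));
posts.Remove(new Document("_id", 1));

// Atomically apply a modifier and get the updated document back.
Document bumped = posts.FindAndModify(new Document("$inc", new Document("views", 1)),
                                      new Document("_id", 2),
                                      true);                    // returnNew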
+ /// + /// + Document GetLastError(); + + /// + /// Retrieves the last error and forces the database to fsync all files before returning. + /// + /// if set to true [fsync]. + /// + /// + /// Server version 1.3+ + /// + Document GetLastError(bool fsync); + + /// + /// Call after sending a bulk operation to the database. + /// + /// + Document GetPreviousError(); + + /// + /// Gets the sister database on the same Mongo connection with the given name. + /// + /// Name of the sister database. + /// + MongoDatabase GetSisterDatabase(string sisterDatabaseName); + + /// + /// Resets last error. This is good to call before a bulk operation. + /// + void ResetError(); + + /// + /// Evals the specified javascript. + /// + /// The javascript. + /// + Document Eval(string javascript); + + /// + /// Evals the specified javascript. + /// + /// The javascript. + /// The scope. + /// + Document Eval(string javascript, Document scope); + + /// + /// Evals the specified code scope. + /// + /// The code scope. + /// + Document Eval(CodeWScope codeScope); + + /// + /// Sends the command. + /// + /// The command name. + /// + Document SendCommand(string commandName); + + /// + /// Sends the command. + /// + /// The command. + /// + Document SendCommand(Document command); + + /// + /// Sends the command. + /// + /// Type of serialization root. + /// The CMD. + /// + Document SendCommand(Type rootType, Document command); + } +} \ No newline at end of file diff --git a/source/MongoDB/IndexOrder.cs b/source/MongoDB/IndexOrder.cs new file mode 100644 index 00000000..5b1927f5 --- /dev/null +++ b/source/MongoDB/IndexOrder.cs @@ -0,0 +1,16 @@ +namespace MongoDB +{ + /// + /// + /// + public enum IndexOrder { + /// + /// + /// + Descending = -1, + /// + /// + /// + Ascending = 1 + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/DocumentQuery.cs b/source/MongoDB/Linq/DocumentQuery.cs new file mode 100644 index 00000000..a3491129 --- /dev/null +++ b/source/MongoDB/Linq/DocumentQuery.cs @@ -0,0 +1,397 @@ +using System; + +namespace MongoDB.Linq { + + /// + /// This class is a construct for writing strongly typed query expressions for Document fields. + /// It is not meant to be used outside of expressions, since most functions and operators return + /// fake data and are only used to extract parameter information from expressions. + /// + internal class DocumentQuery { + /// + /// + /// + private readonly string key; + + /// + /// Initializes a new instance of the class. + /// + /// The document. + /// The key. + public DocumentQuery(Document document, string key) { + this.key = key; + } + + /// + /// Gets the key. + /// + /// The key. + public string Key { get { return key; } } + + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator ==(DocumentQuery a, string b) { return false; } + + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(DocumentQuery a, string b) { return false; } + + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator ==(string a, DocumentQuery b) { return false; } + + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(string a, DocumentQuery b) { return false; } + + /// + /// Implements the operator >. + /// + /// A. + /// The b. + /// The result of the operator. 
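A sketch of connecting and issuing commands through the interfaces above. The concrete Mongo client class and its connection-string format are not part of this diff, so both are assumptions here.

// Sketch; the Mongo class and the connection string below are assumed, not defined in this diff.
using (IMongo mongo = new Mongo("Server=localhost:27017"))
{
    mongo.Connect();
    IMongoDatabase db = mongo.GetDatabase("blog");

    var posts = db.GetCollection("posts");              // untyped (Document) collection
    Document buildInfo = db.SendCommand("buildinfo");   // run a database command by name

    Document evaluated = db.Eval("function() { return 1 + 1; }");   // server-side javascript
    Document lastError = db.GetLastError();             // check the outcome of prior writes
}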
+ public static bool operator >(DocumentQuery a, int b) { return false; } + + /// + /// Implements the operator >=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >=(DocumentQuery a, int b) { return false; } + + /// + /// Implements the operator <. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <(DocumentQuery a, int b) { return false; } + + /// + /// Implements the operator <=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <=(DocumentQuery a, int b) { return false; } + + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator ==(DocumentQuery a, int b) { return false; } + + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(DocumentQuery a, int b) { return false; } + + /// + /// Implements the operator >. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >(int a, DocumentQuery b) { return false; } + + /// + /// Implements the operator >=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >=(int a, DocumentQuery b) { return false; } + + /// + /// Implements the operator <. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <(int a, DocumentQuery b) { return false; } + + /// + /// Implements the operator <=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <=(int a, DocumentQuery b) { return false; } + + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator ==(int a, DocumentQuery b) { return false; } + + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(int a, DocumentQuery b) { return false; } + + /// + /// Implements the operator >. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >(DocumentQuery a, double b) { return false; } + + /// + /// Implements the operator >=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >=(DocumentQuery a, double b) { return false; } + + /// + /// Implements the operator <. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <(DocumentQuery a, double b) { return false; } + + /// + /// Implements the operator <=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <=(DocumentQuery a, double b) { return false; } + + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator ==(DocumentQuery a, double b) { return false; } + + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(DocumentQuery a, double b) { return false; } + + /// + /// Implements the operator >. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >(double a, DocumentQuery b) { return false; } + + /// + /// Implements the operator >=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >=(double a, DocumentQuery b) { return false; } + + /// + /// Implements the operator <. + /// + /// A. 
+ /// The b. + /// The result of the operator. + public static bool operator <(double a, DocumentQuery b) { return false; } + + /// + /// Implements the operator <=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <=(double a, DocumentQuery b) { return false; } + + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator ==(double a, DocumentQuery b) { return false; } + + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(double a, DocumentQuery b) { return false; } + + /// + /// Implements the operator >. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >(DocumentQuery a, DateTime b) { return false; } + + /// + /// Implements the operator >=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >=(DocumentQuery a, DateTime b) { return false; } + + /// + /// Implements the operator <. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <(DocumentQuery a, DateTime b) { return false; } + + /// + /// Implements the operator <=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <=(DocumentQuery a, DateTime b) { return false; } + + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator ==(DocumentQuery a, DateTime b) { return false; } + + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(DocumentQuery a, DateTime b) { return false; } + + /// + /// Implements the operator >. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >(DateTime a, DocumentQuery b) { return false; } + + /// + /// Implements the operator >=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >=(DateTime a, DocumentQuery b) { return false; } + + /// + /// Implements the operator <. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <(DateTime a, DocumentQuery b) { return false; } + + /// + /// Implements the operator <=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <=(DateTime a, DocumentQuery b) { return false; } + + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator ==(DateTime a, DocumentQuery b) { return false; } + + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(DateTime a, DocumentQuery b) { return false; } + + /// + /// Equalses the specified other. + /// + /// The other. + /// + public bool Equals(DocumentQuery other) + { + if(ReferenceEquals(null, other)) + return false; + if(ReferenceEquals(this, other)) + return true; + return Equals(other.key, key); + } + + /// + /// Determines whether the specified is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + /// + /// The parameter is null. 
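Illustrative sketch, not part of the diff: why operators that return fake data are useful at all. The lambda below is never executed; it only has to compile, so the provider can walk the resulting expression tree and pull the key and constant out of it, which is the role DocumentQuery plays for Document fields inside the LINQ translator. The field name and threshold are made up.

using System;
using System.Linq.Expressions;
using MongoDB;

internal static class FakeOperatorSample
{
    public static void Run()
    {
        Expression<Func<Document, bool>> predicate = d => (int)d["Age"] > 21;

        // The tree is inspected, never invoked: GreaterThan(Convert(get_Item("Age")), 21).
        var comparison = (BinaryExpression)predicate.Body;
        var indexerCall = (MethodCallExpression)((UnaryExpression)comparison.Left).Operand;
        var key = (string)((ConstantExpression)indexerCall.Arguments[0]).Value;
        var value = ((ConstantExpression)comparison.Right).Value;

        Console.WriteLine("{0} > {1}", key, value);   // prints: Age > 21
    }
}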
+ /// + public override bool Equals(object obj) + { + if(ReferenceEquals(null, obj)) + return false; + if(ReferenceEquals(this, obj)) + return true; + if(obj.GetType() != typeof(DocumentQuery)) + return false; + return Equals((DocumentQuery)obj); + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode() + { + return (key != null ? key.GetHashCode() : 0); + } + } +} diff --git a/source/MongoDB/Linq/ExecutionBuilder.cs b/source/MongoDB/Linq/ExecutionBuilder.cs new file mode 100644 index 00000000..5b810fcc --- /dev/null +++ b/source/MongoDB/Linq/ExecutionBuilder.cs @@ -0,0 +1,39 @@ +using System; +using System.Collections.Generic; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; +using MongoDB.Linq.Translators; + +namespace MongoDB.Linq +{ + internal class ExecutionBuilder : MongoExpressionVisitor + { + private Expression _provider; + + public Expression Build(Expression expression, Expression provider) + { + _provider = provider; + return Visit(expression); + } + + protected override Expression VisitProjection(ProjectionExpression projection) + { + var queryObject = new MongoQueryObjectBuilder().Build(projection); + queryObject.Projector = new ProjectionBuilder().Build(projection.Projector, queryObject.DocumentType, "document", queryObject.IsMapReduce); + queryObject.Aggregator = (LambdaExpression)Visit(projection.Aggregator); + + Expression result = Expression.Call( + _provider, + "ExecuteQueryObject", + Type.EmptyTypes, + Expression.Constant(queryObject, typeof(MongoQueryObject))); + + if (queryObject.Aggregator != null) + result = Expression.Convert(result, queryObject.Aggregator.Body.Type); + else + result = Expression.Convert(result, typeof(IEnumerable<>).MakeGenericType(queryObject.Projector.Body.Type)); + + return result; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/AggregateExpression.cs b/source/MongoDB/Linq/Expressions/AggregateExpression.cs new file mode 100644 index 00000000..2d46d7c3 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/AggregateExpression.cs @@ -0,0 +1,22 @@ +using System; +using System.Linq.Expressions; + +namespace MongoDB.Linq.Expressions +{ + internal class AggregateExpression : MongoExpression + { + public AggregateType AggregateType { get; private set; } + + public Expression Argument { get; private set; } + + public bool Distinct { get; private set; } + + public AggregateExpression(Type type, AggregateType aggregateType, Expression argument, bool distinct) + : base(MongoExpressionType.Aggregate, type) + { + AggregateType = aggregateType; + Argument = argument; + Distinct = distinct; + } + } +} diff --git a/source/MongoDB/Linq/Expressions/AggregateSubqueryExpression.cs b/source/MongoDB/Linq/Expressions/AggregateSubqueryExpression.cs new file mode 100644 index 00000000..3a528589 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/AggregateSubqueryExpression.cs @@ -0,0 +1,21 @@ +using System.Linq.Expressions; + +namespace MongoDB.Linq.Expressions +{ + internal class AggregateSubqueryExpression : MongoExpression + { + public Expression AggregateInGroupSelect { get; private set; } + + public ScalarExpression AggregateAsSubquery { get; private set; } + + public Alias GroupByAlias { get; private set; } + + public AggregateSubqueryExpression(Alias groupByAlias, Expression aggregateInGroupSelect, ScalarExpression aggregateAsSubquery) + : 
base(MongoExpressionType.AggregateSubquery, aggregateAsSubquery.Type) + { + GroupByAlias = groupByAlias; + AggregateInGroupSelect = aggregateInGroupSelect; + AggregateAsSubquery = aggregateAsSubquery; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/AggregateType.cs b/source/MongoDB/Linq/Expressions/AggregateType.cs new file mode 100644 index 00000000..89f9c519 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/AggregateType.cs @@ -0,0 +1,11 @@ +namespace MongoDB.Linq.Expressions +{ + internal enum AggregateType + { + Count, + Min, + Max, + Average, + Sum + } +} diff --git a/source/MongoDB/Linq/Expressions/Alias.cs b/source/MongoDB/Linq/Expressions/Alias.cs new file mode 100644 index 00000000..a1989ee6 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/Alias.cs @@ -0,0 +1,5 @@ +namespace MongoDB.Linq.Expressions +{ + internal sealed class Alias + { } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/AliasedExpression.cs b/source/MongoDB/Linq/Expressions/AliasedExpression.cs new file mode 100644 index 00000000..5ae29e30 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/AliasedExpression.cs @@ -0,0 +1,15 @@ +using System; + +namespace MongoDB.Linq.Expressions +{ + internal abstract class AliasedExpression : MongoExpression + { + public Alias Alias { get; private set; } + + protected AliasedExpression(MongoExpressionType nodeType, Type type, Alias alias) + : base(nodeType, type) + { + Alias = alias; + } + } +} diff --git a/source/MongoDB/Linq/Expressions/CollectionExpression.cs b/source/MongoDB/Linq/Expressions/CollectionExpression.cs new file mode 100644 index 00000000..befc13f0 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/CollectionExpression.cs @@ -0,0 +1,21 @@ +using System; + +namespace MongoDB.Linq.Expressions +{ + internal class CollectionExpression : AliasedExpression + { + public string CollectionName { get; private set; } + + public IMongoDatabase Database { get; private set; } + + public Type DocumentType { get; private set; } + + public CollectionExpression(Alias alias, IMongoDatabase database, string collectionName, Type documentType) + : base(MongoExpressionType.Collection, typeof(void), alias) + { + CollectionName = collectionName; + Database = database; + DocumentType = documentType; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/ExpressionComparer.cs b/source/MongoDB/Linq/Expressions/ExpressionComparer.cs new file mode 100644 index 00000000..43298635 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/ExpressionComparer.cs @@ -0,0 +1,334 @@ +using System; +using System.Collections.ObjectModel; +using System.Linq.Expressions; +using System.Reflection; +using MongoDB.Util; + +namespace MongoDB.Linq.Expressions +{ + internal class ExpressionComparer + { + private ScopedDictionary _parameterScope; + + protected ExpressionComparer(ScopedDictionary parameterScope) + { + _parameterScope = parameterScope; + } + + public static bool AreEqual(Expression a, Expression b) + { + return AreEqual(null, a, b); + } + + public static bool AreEqual(ScopedDictionary parameterScope, Expression a, Expression b) + { + return new ExpressionComparer(parameterScope).Compare(a, b); + } + + protected virtual bool Compare(Expression a, Expression b) + { + if (a == b) + return true; + if (a == null || b == null) + return false; + if (a.NodeType != b.NodeType) + return false; + if (a.Type != b.Type) + return false; + switch (a.NodeType) + { + case ExpressionType.Negate: + case 
ExpressionType.NegateChecked: + case ExpressionType.Not: + case ExpressionType.Convert: + case ExpressionType.ConvertChecked: + case ExpressionType.ArrayLength: + case ExpressionType.Quote: + case ExpressionType.TypeAs: + case ExpressionType.UnaryPlus: + return CompareUnary((UnaryExpression)a, (UnaryExpression)b); + case ExpressionType.Add: + case ExpressionType.AddChecked: + case ExpressionType.Subtract: + case ExpressionType.SubtractChecked: + case ExpressionType.Multiply: + case ExpressionType.MultiplyChecked: + case ExpressionType.Divide: + case ExpressionType.Modulo: + case ExpressionType.And: + case ExpressionType.AndAlso: + case ExpressionType.Or: + case ExpressionType.OrElse: + case ExpressionType.LessThan: + case ExpressionType.LessThanOrEqual: + case ExpressionType.GreaterThan: + case ExpressionType.GreaterThanOrEqual: + case ExpressionType.Equal: + case ExpressionType.NotEqual: + case ExpressionType.Coalesce: + case ExpressionType.ArrayIndex: + case ExpressionType.RightShift: + case ExpressionType.LeftShift: + case ExpressionType.ExclusiveOr: + case ExpressionType.Power: + return CompareBinary((BinaryExpression)a, (BinaryExpression)b); + case ExpressionType.TypeIs: + return CompareTypeIs((TypeBinaryExpression)a, (TypeBinaryExpression)b); + case ExpressionType.Conditional: + return CompareConditional((ConditionalExpression)a, (ConditionalExpression)b); + case ExpressionType.Constant: + return CompareConstant((ConstantExpression)a, (ConstantExpression)b); + case ExpressionType.Parameter: + return CompareParameter((ParameterExpression)a, (ParameterExpression)b); + case ExpressionType.MemberAccess: + return CompareMemberAccess((MemberExpression)a, (MemberExpression)b); + case ExpressionType.Call: + return CompareMethodCall((MethodCallExpression)a, (MethodCallExpression)b); + case ExpressionType.Lambda: + return CompareLambda((LambdaExpression)a, (LambdaExpression)b); + case ExpressionType.New: + return CompareNew((NewExpression)a, (NewExpression)b); + case ExpressionType.NewArrayInit: + case ExpressionType.NewArrayBounds: + return CompareNewArray((NewArrayExpression)a, (NewArrayExpression)b); + case ExpressionType.Invoke: + return CompareInvocation((InvocationExpression)a, (InvocationExpression)b); + case ExpressionType.MemberInit: + return CompareMemberInit((MemberInitExpression)a, (MemberInitExpression)b); + case ExpressionType.ListInit: + return CompareListInit((ListInitExpression)a, (ListInitExpression)b); + default: + throw new Exception(string.Format("Unhandled expression type: '{0}'", a.NodeType)); + } + } + + protected virtual bool CompareUnary(UnaryExpression a, UnaryExpression b) + { + return a.NodeType == b.NodeType + && a.Method == b.Method + && a.IsLifted == b.IsLifted + && a.IsLiftedToNull == b.IsLiftedToNull + && Compare(a.Operand, b.Operand); + } + + protected virtual bool CompareBinary(BinaryExpression a, BinaryExpression b) + { + return a.NodeType == b.NodeType + && a.Method == b.Method + && a.IsLifted == b.IsLifted + && a.IsLiftedToNull == b.IsLiftedToNull + && Compare(a.Left, b.Left) + && Compare(a.Right, b.Right); + } + + protected virtual bool CompareTypeIs(TypeBinaryExpression a, TypeBinaryExpression b) + { + return a.TypeOperand == b.TypeOperand + && Compare(a.Expression, b.Expression); + } + + protected virtual bool CompareConditional(ConditionalExpression a, ConditionalExpression b) + { + return Compare(a.Test, b.Test) + && Compare(a.IfTrue, b.IfTrue) + && Compare(a.IfFalse, b.IfFalse); + } + + protected virtual bool CompareConstant(ConstantExpression a, 
ConstantExpression b) + { + return Equals(a.Value, b.Value); + } + + protected virtual bool CompareParameter(ParameterExpression a, ParameterExpression b) + { + if (_parameterScope != null) + { + ParameterExpression mapped; + if (_parameterScope.TryGetValue(a, out mapped)) + return mapped == b; + } + return a == b; + } + + protected virtual bool CompareMemberAccess(MemberExpression a, MemberExpression b) + { + return a.Member == b.Member + && Compare(a.Expression, b.Expression); + } + + protected virtual bool CompareMethodCall(MethodCallExpression a, MethodCallExpression b) + { + return a.Method == b.Method + && Compare(a.Object, b.Object) + && CompareExpressionList(a.Arguments, b.Arguments); + } + + protected virtual bool CompareLambda(LambdaExpression a, LambdaExpression b) + { + int n = a.Parameters.Count; + if (b.Parameters.Count != n) + return false; + // all must have same type + for (int i = 0; i < n; i++) + { + if (a.Parameters[i].Type != b.Parameters[i].Type) + return false; + } + var save = _parameterScope; + _parameterScope = new ScopedDictionary(_parameterScope); + try + { + for (int i = 0; i < n; i++) + { + _parameterScope.Add(a.Parameters[i], b.Parameters[i]); + } + return Compare(a.Body, b.Body); + } + finally + { + _parameterScope = save; + } + } + + protected virtual bool CompareNew(NewExpression a, NewExpression b) + { + return a.Constructor == b.Constructor + && CompareExpressionList(a.Arguments, b.Arguments) + && CompareMemberList(a.Members, b.Members); + } + + protected virtual bool CompareExpressionList(ReadOnlyCollection a, ReadOnlyCollection b) + { + if (a == b) + return true; + if (a == null || b == null) + return false; + if (a.Count != b.Count) + return false; + for (int i = 0, n = a.Count; i < n; i++) + { + if (!Compare(a[i], b[i])) + return false; + } + return true; + } + + protected virtual bool CompareMemberList(ReadOnlyCollection a, ReadOnlyCollection b) + { + if (a == b) + return true; + if (a == null || b == null) + return false; + if (a.Count != b.Count) + return false; + for (int i = 0, n = a.Count; i < n; i++) + { + if (a[i] != b[i]) + return false; + } + return true; + } + + protected virtual bool CompareNewArray(NewArrayExpression a, NewArrayExpression b) + { + return CompareExpressionList(a.Expressions, b.Expressions); + } + + protected virtual bool CompareInvocation(InvocationExpression a, InvocationExpression b) + { + return Compare(a.Expression, b.Expression) + && CompareExpressionList(a.Arguments, b.Arguments); + } + + protected virtual bool CompareMemberInit(MemberInitExpression a, MemberInitExpression b) + { + return Compare(a.NewExpression, b.NewExpression) + && CompareBindingList(a.Bindings, b.Bindings); + } + + protected virtual bool CompareBindingList(ReadOnlyCollection a, ReadOnlyCollection b) + { + if (a == b) + return true; + if (a == null || b == null) + return false; + if (a.Count != b.Count) + return false; + for (int i = 0, n = a.Count; i < n; i++) + { + if (!CompareBinding(a[i], b[i])) + return false; + } + return true; + } + + protected virtual bool CompareBinding(MemberBinding a, MemberBinding b) + { + if (a == b) + return true; + if (a == null || b == null) + return false; + if (a.BindingType != b.BindingType) + return false; + if (a.Member != b.Member) + return false; + switch (a.BindingType) + { + case MemberBindingType.Assignment: + return CompareMemberAssignment((MemberAssignment)a, (MemberAssignment)b); + case MemberBindingType.ListBinding: + return CompareMemberListBinding((MemberListBinding)a, (MemberListBinding)b); + case 
MemberBindingType.MemberBinding: + return CompareMemberMemberBinding((MemberMemberBinding)a, (MemberMemberBinding)b); + default: + throw new Exception(string.Format("Unhandled binding type: '{0}'", a.BindingType)); + } + } + + protected virtual bool CompareMemberAssignment(MemberAssignment a, MemberAssignment b) + { + return a.Member == b.Member + && Compare(a.Expression, b.Expression); + } + + protected virtual bool CompareMemberListBinding(MemberListBinding a, MemberListBinding b) + { + return a.Member == b.Member + && CompareElementInitList(a.Initializers, b.Initializers); + } + + protected virtual bool CompareMemberMemberBinding(MemberMemberBinding a, MemberMemberBinding b) + { + return a.Member == b.Member + && CompareBindingList(a.Bindings, b.Bindings); + } + + protected virtual bool CompareListInit(ListInitExpression a, ListInitExpression b) + { + return Compare(a.NewExpression, b.NewExpression) + && CompareElementInitList(a.Initializers, b.Initializers); + } + + protected virtual bool CompareElementInitList(ReadOnlyCollection a, ReadOnlyCollection b) + { + if (a == b) + return true; + if (a == null || b == null) + return false; + if (a.Count != b.Count) + return false; + for (int i = 0, n = a.Count; i < n; i++) + { + if (!CompareElementInit(a[i], b[i])) + return false; + } + return true; + } + + protected virtual bool CompareElementInit(ElementInit a, ElementInit b) + { + return a.AddMethod == b.AddMethod + && CompareExpressionList(a.Arguments, b.Arguments); + } + } +} \ No newline at end of file diff --git a/MongoDB.Linq/ExpressionTreeVisitor.cs b/source/MongoDB/Linq/Expressions/ExpressionVisitor.cs similarity index 75% rename from MongoDB.Linq/ExpressionTreeVisitor.cs rename to source/MongoDB/Linq/Expressions/ExpressionVisitor.cs index 55a27c8b..2289ea9b 100644 --- a/MongoDB.Linq/ExpressionTreeVisitor.cs +++ b/source/MongoDB/Linq/Expressions/ExpressionVisitor.cs @@ -3,12 +3,16 @@ using System.Collections.ObjectModel; using System.Linq.Expressions; -namespace MongoDB.Linq { - public abstract class ExpressionVisitor { - protected virtual Expression Visit(Expression exp) { +namespace MongoDB.Linq.Expressions +{ + internal abstract class ExpressionVisitor + { + protected virtual Expression Visit(Expression exp) + { if (exp == null) return exp; - switch (exp.NodeType) { + switch (exp.NodeType) + { case ExpressionType.Negate: case ExpressionType.NegateChecked: case ExpressionType.Not: @@ -17,6 +21,7 @@ protected virtual Expression Visit(Expression exp) { case ExpressionType.ArrayLength: case ExpressionType.Quote: case ExpressionType.TypeAs: + case ExpressionType.UnaryPlus: return this.VisitUnary((UnaryExpression)exp); case ExpressionType.Add: case ExpressionType.AddChecked: @@ -41,6 +46,7 @@ protected virtual Expression Visit(Expression exp) { case ExpressionType.RightShift: case ExpressionType.LeftShift: case ExpressionType.ExclusiveOr: + case ExpressionType.Power: return this.VisitBinary((BinaryExpression)exp); case ExpressionType.TypeIs: return this.VisitTypeIs((TypeBinaryExpression)exp); @@ -72,8 +78,10 @@ protected virtual Expression Visit(Expression exp) { } } - protected virtual MemberBinding VisitBinding(MemberBinding binding) { - switch (binding.BindingType) { + protected virtual MemberBinding VisitBinding(MemberBinding binding) + { + switch (binding.BindingType) + { case MemberBindingType.Assignment: return this.VisitMemberAssignment((MemberAssignment)binding); case MemberBindingType.MemberBinding: @@ -85,27 +93,33 @@ protected virtual MemberBinding VisitBinding(MemberBinding 
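Illustrative sketch, not part of the diff: what ExpressionComparer.AreEqual buys over reference equality. Two lambdas built separately are never the same instance, but they compare as structurally equal because the parameter scope maps x onto y before the bodies are walked. The type is internal, so this only compiles inside the driver assembly.

using System;
using System.Linq.Expressions;
using MongoDB.Linq.Expressions;

internal static class ComparerSample
{
    public static void Run()
    {
        Expression<Func<int, int>> a = x => x * 2 + 1;
        Expression<Func<int, int>> b = y => y * 2 + 1;

        Console.WriteLine(ReferenceEquals(a, b));              // False: distinct trees
        Console.WriteLine(ExpressionComparer.AreEqual(a, b));  // True: same structure
    }
}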
binding) { } } - protected virtual ElementInit VisitElementInitializer(ElementInit initializer) { + protected virtual ElementInit VisitElementInitializer(ElementInit initializer) + { ReadOnlyCollection arguments = this.VisitExpressionList(initializer.Arguments); - if (arguments != initializer.Arguments) { + if (arguments != initializer.Arguments) + { return Expression.ElementInit(initializer.AddMethod, arguments); } return initializer; } - protected virtual Expression VisitUnary(UnaryExpression u) { + protected virtual Expression VisitUnary(UnaryExpression u) + { Expression operand = this.Visit(u.Operand); - if (operand != u.Operand) { + if (operand != u.Operand) + { return Expression.MakeUnary(u.NodeType, operand, u.Type, u.Method); } return u; } - protected virtual Expression VisitBinary(BinaryExpression b) { + protected virtual Expression VisitBinary(BinaryExpression b) + { Expression left = this.Visit(b.Left); Expression right = this.Visit(b.Right); Expression conversion = this.Visit(b.Conversion); - if (left != b.Left || right != b.Right || conversion != b.Conversion) { + if (left != b.Left || right != b.Right || conversion != b.Conversion) + { if (b.NodeType == ExpressionType.Coalesce && b.Conversion != null) return Expression.Coalesce(left, right, conversion as LambdaExpression); else @@ -114,102 +128,134 @@ protected virtual Expression VisitBinary(BinaryExpression b) { return b; } - protected virtual Expression VisitTypeIs(TypeBinaryExpression b) { + protected virtual Expression VisitTypeIs(TypeBinaryExpression b) + { Expression expr = this.Visit(b.Expression); - if (expr != b.Expression) { + if (expr != b.Expression) + { return Expression.TypeIs(expr, b.TypeOperand); } return b; } - protected virtual Expression VisitConstant(ConstantExpression c) { + protected virtual Expression VisitConstant(ConstantExpression c) + { return c; } - protected virtual Expression VisitConditional(ConditionalExpression c) { + protected virtual Expression VisitConditional(ConditionalExpression c) + { Expression test = this.Visit(c.Test); Expression ifTrue = this.Visit(c.IfTrue); Expression ifFalse = this.Visit(c.IfFalse); - if (test != c.Test || ifTrue != c.IfTrue || ifFalse != c.IfFalse) { + if (test != c.Test || ifTrue != c.IfTrue || ifFalse != c.IfFalse) + { return Expression.Condition(test, ifTrue, ifFalse); } return c; } - protected virtual Expression VisitParameter(ParameterExpression p) { + protected virtual Expression VisitParameter(ParameterExpression p) + { return p; } - protected virtual Expression VisitMemberAccess(MemberExpression m) { + protected virtual Expression VisitMemberAccess(MemberExpression m) + { Expression exp = this.Visit(m.Expression); - if (exp != m.Expression) { + if (exp != m.Expression) + { return Expression.MakeMemberAccess(exp, m.Member); } return m; } - protected virtual Expression VisitMethodCall(MethodCallExpression m) { + protected virtual Expression VisitMethodCall(MethodCallExpression m) + { Expression obj = this.Visit(m.Object); IEnumerable args = this.VisitExpressionList(m.Arguments); - if (obj != m.Object || args != m.Arguments) { + if (obj != m.Object || args != m.Arguments) + { return Expression.Call(obj, m.Method, args); } return m; } - protected virtual ReadOnlyCollection VisitExpressionList(ReadOnlyCollection original) { + protected virtual ReadOnlyCollection VisitExpressionList(ReadOnlyCollection original) + { + if (original == null) + return original; + List list = null; - for (int i = 0, n = original.Count; i < n; i++) { + for (int i = 0, n = 
original.Count; i < n; i++) + { Expression p = this.Visit(original[i]); - if (list != null) { + if (list != null) + { list.Add(p); - } else if (p != original[i]) { + } + else if (p != original[i]) + { list = new List(n); - for (int j = 0; j < i; j++) { + for (int j = 0; j < i; j++) + { list.Add(original[j]); } list.Add(p); } } - if (list != null) { + if (list != null) + { return list.AsReadOnly(); } return original; } - protected virtual MemberAssignment VisitMemberAssignment(MemberAssignment assignment) { + protected virtual MemberAssignment VisitMemberAssignment(MemberAssignment assignment) + { Expression e = this.Visit(assignment.Expression); - if (e != assignment.Expression) { + if (e != assignment.Expression) + { return Expression.Bind(assignment.Member, e); } return assignment; } - protected virtual MemberMemberBinding VisitMemberMemberBinding(MemberMemberBinding binding) { + protected virtual MemberMemberBinding VisitMemberMemberBinding(MemberMemberBinding binding) + { IEnumerable bindings = this.VisitBindingList(binding.Bindings); - if (bindings != binding.Bindings) { + if (bindings != binding.Bindings) + { return Expression.MemberBind(binding.Member, bindings); } return binding; } - protected virtual MemberListBinding VisitMemberListBinding(MemberListBinding binding) { + protected virtual MemberListBinding VisitMemberListBinding(MemberListBinding binding) + { IEnumerable initializers = this.VisitElementInitializerList(binding.Initializers); - if (initializers != binding.Initializers) { + if (initializers != binding.Initializers) + { return Expression.ListBind(binding.Member, initializers); } return binding; } - protected virtual IEnumerable VisitBindingList(ReadOnlyCollection original) { + protected virtual IEnumerable VisitBindingList(ReadOnlyCollection original) + { List list = null; - for (int i = 0, n = original.Count; i < n; i++) { + for (int i = 0, n = original.Count; i < n; i++) + { MemberBinding b = this.VisitBinding(original[i]); - if (list != null) { + if (list != null) + { list.Add(b); - } else if (b != original[i]) { + } + else if (b != original[i]) + { list = new List(n); - for (int j = 0; j < i; j++) { + for (int j = 0; j < i; j++) + { list.Add(original[j]); } list.Add(b); @@ -220,15 +266,21 @@ protected virtual IEnumerable VisitBindingList(ReadOnlyCollection return original; } - protected virtual IEnumerable VisitElementInitializerList(ReadOnlyCollection original) { + protected virtual IEnumerable VisitElementInitializerList(ReadOnlyCollection original) + { List list = null; - for (int i = 0, n = original.Count; i < n; i++) { + for (int i = 0, n = original.Count; i < n; i++) + { ElementInit init = this.VisitElementInitializer(original[i]); - if (list != null) { + if (list != null) + { list.Add(init); - } else if (init != original[i]) { + } + else if (init != original[i]) + { list = new List(n); - for (int j = 0; j < i; j++) { + for (int j = 0; j < i; j++) + { list.Add(original[j]); } list.Add(init); @@ -239,62 +291,71 @@ protected virtual IEnumerable VisitElementInitializerList(ReadOnlyC return original; } - protected virtual Expression VisitLambda(LambdaExpression lambda) { + protected virtual Expression VisitLambda(LambdaExpression lambda) + { Expression body = this.Visit(lambda.Body); - if (body != lambda.Body) { + if (body != lambda.Body) + { return Expression.Lambda(lambda.Type, body, lambda.Parameters); } return lambda; } - protected virtual NewExpression VisitNew(NewExpression nex) { + protected virtual NewExpression VisitNew(NewExpression nex) + { 
IEnumerable args = this.VisitExpressionList(nex.Arguments); - if (args != nex.Arguments) { + if (args != nex.Arguments) + { if (nex.Members != null) return Expression.New(nex.Constructor, args, nex.Members); - else - return Expression.New(nex.Constructor, args); + return Expression.New(nex.Constructor, args); } return nex; } - protected virtual Expression VisitMemberInit(MemberInitExpression init) { + protected virtual Expression VisitMemberInit(MemberInitExpression init) + { NewExpression n = this.VisitNew(init.NewExpression); IEnumerable bindings = this.VisitBindingList(init.Bindings); - if (n != init.NewExpression || bindings != init.Bindings) { + if (n != init.NewExpression || bindings != init.Bindings) + { return Expression.MemberInit(n, bindings); } return init; } - protected virtual Expression VisitListInit(ListInitExpression init) { + protected virtual Expression VisitListInit(ListInitExpression init) + { NewExpression n = this.VisitNew(init.NewExpression); IEnumerable initializers = this.VisitElementInitializerList(init.Initializers); - if (n != init.NewExpression || initializers != init.Initializers) { + if (n != init.NewExpression || initializers != init.Initializers) + { return Expression.ListInit(n, initializers); } return init; } - protected virtual Expression VisitNewArray(NewArrayExpression na) { + protected virtual Expression VisitNewArray(NewArrayExpression na) + { IEnumerable exprs = this.VisitExpressionList(na.Expressions); - if (exprs != na.Expressions) { - if (na.NodeType == ExpressionType.NewArrayInit) { - return Expression.NewArrayInit(na.Type.GetElementType(), exprs); - } else { - return Expression.NewArrayBounds(na.Type.GetElementType(), exprs); - } + if (exprs != na.Expressions) + { + return na.NodeType == ExpressionType.NewArrayInit + ? 
Expression.NewArrayInit(na.Type.GetElementType(), exprs) + : Expression.NewArrayBounds(na.Type.GetElementType(), exprs); } return na; } - protected virtual Expression VisitInvocation(InvocationExpression iv) { + protected virtual Expression VisitInvocation(InvocationExpression iv) + { IEnumerable args = this.VisitExpressionList(iv.Arguments); Expression expr = this.Visit(iv.Expression); - if (args != iv.Arguments || expr != iv.Expression) { + if (args != iv.Arguments || expr != iv.Expression) + { return Expression.Invoke(expr, args); } return iv; } } -} +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/FieldDeclaration.cs b/source/MongoDB/Linq/Expressions/FieldDeclaration.cs new file mode 100644 index 00000000..373ef8c0 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/FieldDeclaration.cs @@ -0,0 +1,27 @@ +using System.Linq.Expressions; + +namespace MongoDB.Linq.Expressions +{ + internal class FieldDeclaration + { + private readonly string _name; + private readonly Expression _expression; + + public string Name + { + get { return _name; } + } + + public Expression Expression + { + get { return _expression; } + } + + public FieldDeclaration(string name, Expression expression) + { + _name = name; + _expression = expression; + } + + } +} diff --git a/source/MongoDB/Linq/Expressions/FieldExpression.cs b/source/MongoDB/Linq/Expressions/FieldExpression.cs new file mode 100644 index 00000000..bb020c75 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/FieldExpression.cs @@ -0,0 +1,21 @@ +using System.Linq.Expressions; + +namespace MongoDB.Linq.Expressions +{ + internal class FieldExpression : MongoExpression + { + public Alias Alias { get; private set; } + + public Expression Expression { get; private set; } + + public string Name { get; private set; } + + public FieldExpression(Expression expression, Alias alias, string name) + : base(MongoExpressionType.Field, expression.Type) + { + Alias = alias; + Expression = expression; + Name = name; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/MongoExpression.cs b/source/MongoDB/Linq/Expressions/MongoExpression.cs new file mode 100644 index 00000000..3ac9e468 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/MongoExpression.cs @@ -0,0 +1,12 @@ +using System; +using System.Linq.Expressions; + +namespace MongoDB.Linq.Expressions +{ + internal abstract class MongoExpression : Expression + { + protected MongoExpression(MongoExpressionType nodeType, Type type) + : base((ExpressionType)nodeType, type) + { } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/MongoExpressionComparer.cs b/source/MongoDB/Linq/Expressions/MongoExpressionComparer.cs new file mode 100644 index 00000000..8c554b0e --- /dev/null +++ b/source/MongoDB/Linq/Expressions/MongoExpressionComparer.cs @@ -0,0 +1,205 @@ +using System.Collections.ObjectModel; +using System.Linq; +using System.Linq.Expressions; +using MongoDB.Util; +using MongoDB.Linq.Translators; + +namespace MongoDB.Linq.Expressions +{ + internal class MongoExpressionComparer : ExpressionComparer + { + private ScopedDictionary _aliasScope; + + protected MongoExpressionComparer(ScopedDictionary parameterScope, ScopedDictionary aliasScope) + : base(parameterScope) + { + _aliasScope = aliasScope; + } + + public new static bool AreEqual(Expression a, Expression b) + { + return AreEqual(null, null, a, b); + } + + public static bool AreEqual(ScopedDictionary parameterScope, ScopedDictionary aliasScope, Expression a, Expression b) + { + 
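Illustrative sketch, not part of the diff: the copy-on-change idiom the visitor above is built around. A node is rebuilt only when one of its children was rewritten; untouched subtrees come back as the original instances. The subclass below is hypothetical, rewrites only integer constants, and names the internal base type fully to avoid any clash with the framework visitor.

using System.Linq.Expressions;

internal class ConstantDoubler : MongoDB.Linq.Expressions.ExpressionVisitor
{
    public Expression Rewrite(Expression expression)
    {
        return Visit(expression);
    }

    protected override Expression VisitConstant(ConstantExpression c)
    {
        if (c.Value is int)
            return Expression.Constant((int)c.Value * 2);   // changed: parents get rebuilt
        return c;                                           // unchanged: parents are reused
    }
}

Running new ConstantDoubler().Rewrite(...) over x => x + 1 yields x => x + 2, while the parameter node stays shared between the original and the rewritten tree.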
return new MongoExpressionComparer(parameterScope, aliasScope).Compare(a, b); + } + + protected override bool Compare(Expression a, Expression b) + { + if (a == b) + return true; + if (a == null || b == null) + return false; + if (a.NodeType != b.NodeType) + return false; + if (a.Type != b.Type) + return false; + switch ((MongoExpressionType)a.NodeType) + { + case MongoExpressionType.Collection: + return CompareCollection((CollectionExpression)a, (CollectionExpression)b); + case MongoExpressionType.Field: + return CompareField((FieldExpression)a, (FieldExpression)b); + case MongoExpressionType.Select: + return CompareSelect((SelectExpression)a, (SelectExpression)b); + case MongoExpressionType.Aggregate: + return CompareAggregate((AggregateExpression)a, (AggregateExpression)b); + case MongoExpressionType.Scalar: + return CompareSubquery((SubqueryExpression)a, (SubqueryExpression)b); + case MongoExpressionType.AggregateSubquery: + return CompareAggregateSubquery((AggregateSubqueryExpression)a, (AggregateSubqueryExpression)b); + case MongoExpressionType.Projection: + return CompareProjection((ProjectionExpression)a, (ProjectionExpression)b); + default: + return base.Compare(a, b); + } + } + + protected virtual bool CompareCollection(CollectionExpression a, CollectionExpression b) + { + return a.CollectionName == b.CollectionName; + } + + protected virtual bool CompareField(FieldExpression a, FieldExpression b) + { + return CompareAlias(a.Alias, b.Alias) && a.Name == b.Name && Compare(a.Expression, b.Expression); + } + + protected virtual bool CompareAlias(Alias a, Alias b) + { + if (_aliasScope != null) + { + Alias mapped; + if (_aliasScope.TryGetValue(a, out mapped)) + return mapped == b; + } + return a == b; + } + + protected virtual bool CompareSelect(SelectExpression a, SelectExpression b) + { + var save = _aliasScope; + try + { + if (!Compare(a.From, b.From)) + return false; + + _aliasScope = new ScopedDictionary(save); + MapAliases(a.From, b.From); + + return Compare(a.Where, b.Where) + && CompareOrderList(a.OrderBy, b.OrderBy) + && Compare(a.GroupBy, b.GroupBy) + && Compare(a.Skip, b.Skip) + && Compare(a.Take, b.Take) + && a.IsDistinct == b.IsDistinct + && CompareFieldDeclarations(a.Fields, b.Fields); + } + finally + { + _aliasScope = save; + } + } + + protected virtual bool CompareOrderList(ReadOnlyCollection a, ReadOnlyCollection b) + { + if (a == b) + return true; + if (a == null || b == null) + return false; + if (a.Count != b.Count) + return false; + for (int i = 0, n = a.Count; i < n; i++) + { + if (a[i].OrderType != b[i].OrderType || + !Compare(a[i].Expression, b[i].Expression)) + return false; + } + return true; + } + + protected virtual bool CompareFieldDeclarations(ReadOnlyCollection a, ReadOnlyCollection b) + { + if (a == b) + return true; + if (a == null || b == null) + return false; + if (a.Count != b.Count) + return false; + for (int i = 0, n = a.Count; i < n; i++) + { + if (!CompareFieldDeclaration(a[i], b[i])) + return false; + } + return true; + } + + protected virtual bool CompareFieldDeclaration(FieldDeclaration a, FieldDeclaration b) + { + return a.Name == b.Name && Compare(a.Expression, b.Expression); + } + + protected virtual bool CompareAggregate(AggregateExpression a, AggregateExpression b) + { + return a.AggregateType == b.AggregateType && Compare(a.Argument, b.Argument); + } + + protected virtual bool CompareSubquery(SubqueryExpression a, SubqueryExpression b) + { + if (a.NodeType != b.NodeType) + return false; + switch ((MongoExpressionType)a.NodeType) + { + 
case MongoExpressionType.Scalar: + return CompareScalar((ScalarExpression)a, (ScalarExpression)b); + } + return false; + } + + protected virtual bool CompareScalar(ScalarExpression a, ScalarExpression b) + { + return Compare(a.Select, b.Select); + } + + protected virtual bool CompareAggregateSubquery(AggregateSubqueryExpression a, AggregateSubqueryExpression b) + { + return Compare(a.AggregateAsSubquery, b.AggregateAsSubquery) + && Compare(a.AggregateInGroupSelect, b.AggregateInGroupSelect) + && a.GroupByAlias == b.GroupByAlias; + } + + protected virtual bool CompareProjection(ProjectionExpression a, ProjectionExpression b) + { + if (!Compare(a.Source, b.Source)) + return false; + + var save = _aliasScope; + try + { + _aliasScope = new ScopedDictionary(_aliasScope); + _aliasScope.Add(a.Source.Alias, b.Source.Alias); + + return Compare(a.Projector, b.Projector) + && Compare(a.Aggregator, b.Aggregator) + && a.IsSingleton == b.IsSingleton; + } + finally + { + _aliasScope = save; + } + } + + private void MapAliases(Expression a, Expression b) + { + var gatherer = new DeclaredAliasGatherer(); + Alias[] prodA = gatherer.Gather(a).ToArray(); + Alias[] prodB = gatherer.Gather(b).ToArray(); + for (int i = 0, n = prodA.Length; i < n; i++) + { + _aliasScope.Add(prodA[i], prodB[i]); + } + } + } +} diff --git a/source/MongoDB/Linq/Expressions/MongoExpressionExtensions.cs b/source/MongoDB/Linq/Expressions/MongoExpressionExtensions.cs new file mode 100644 index 00000000..2c42df52 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/MongoExpressionExtensions.cs @@ -0,0 +1,67 @@ +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; + +namespace MongoDB.Linq.Expressions +{ + internal static class MongoExpressionExtensions + { + public static bool HasSelectAllField(this IEnumerable fields) + { + return fields == null || fields.Any(f => f.Name == "*"); + } + + public static SelectExpression AddField(this SelectExpression select, FieldDeclaration field) + { + var fields = new List(select.Fields) {field}; + return select.SetFields(fields); + } + + public static string GetAvailableFieldName(this SelectExpression select, string baseName) + { + var name = baseName; + var n = 0; + while(!IsUniqueName(select, name)) + name = baseName + (n++); + return name; + } + + public static SelectExpression RemoveField(this SelectExpression select, FieldDeclaration field) + { + var fields = new List(select.Fields); + fields.Remove(field); + return select.SetFields(fields); + } + + public static SelectExpression SetFields(this SelectExpression select, IEnumerable fields) + { + return new SelectExpression(select.Alias, + fields.OrderBy(f => f.Name), + select.From, + select.Where, + select.OrderBy, + select.GroupBy, + select.IsDistinct, + select.Skip, + select.Take); + } + + public static SelectExpression SetWhere(this SelectExpression select, Expression where) + { + return new SelectExpression(select.Alias, + select.Fields, + select.From, + where, + select.OrderBy, + select.GroupBy, + select.IsDistinct, + select.Skip, + select.Take); + } + + private static bool IsUniqueName(SelectExpression select, string name) + { + return select.Fields.All(field => field.Name != name); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/MongoExpressionType.cs b/source/MongoDB/Linq/Expressions/MongoExpressionType.cs new file mode 100644 index 00000000..cdb8f41a --- /dev/null +++ b/source/MongoDB/Linq/Expressions/MongoExpressionType.cs @@ -0,0 +1,18 @@ +namespace 
MongoDB.Linq.Expressions +{ + internal enum MongoExpressionType + { + Collection = 1000, + ClientJoin, + Field, + Select, + Projection, + Join, + Order, + Aggregate, + AggregateSubquery, + Scalar, + OuterJoined, + NamedValue + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/MongoExpressionVisitor.cs b/source/MongoDB/Linq/Expressions/MongoExpressionVisitor.cs new file mode 100644 index 00000000..b7a1effe --- /dev/null +++ b/source/MongoDB/Linq/Expressions/MongoExpressionVisitor.cs @@ -0,0 +1,153 @@ +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using System.Linq.Expressions; + +namespace MongoDB.Linq.Expressions +{ + internal class MongoExpressionVisitor : ExpressionVisitor + { + protected override Expression Visit(Expression exp) + { + if (exp == null) + return null; + switch ((MongoExpressionType)exp.NodeType) + { + case MongoExpressionType.Collection: + return VisitCollection((CollectionExpression)exp); + case MongoExpressionType.Field: + return VisitField((FieldExpression)exp); + case MongoExpressionType.Projection: + return VisitProjection((ProjectionExpression)exp); + case MongoExpressionType.Select: + return VisitSelect((SelectExpression)exp); + case MongoExpressionType.Aggregate: + return VisitAggregate((AggregateExpression)exp); + case MongoExpressionType.AggregateSubquery: + return VisitAggregateSubquery((AggregateSubqueryExpression)exp); + case MongoExpressionType.Scalar: + return VisitScalar((ScalarExpression)exp); + default: + return base.Visit(exp); + } + } + + protected virtual Expression VisitAggregate(AggregateExpression aggregate) + { + var exp = Visit(aggregate.Argument); + if (exp != aggregate.Argument) + return new AggregateExpression(aggregate.Type, aggregate.AggregateType, exp, aggregate.Distinct); + + return aggregate; + } + + protected virtual Expression VisitAggregateSubquery(AggregateSubqueryExpression aggregateSubquery) + { + Expression e = Visit(aggregateSubquery.AggregateAsSubquery); + ScalarExpression subquery = (ScalarExpression)e; + if (subquery != aggregateSubquery.AggregateAsSubquery) + return new AggregateSubqueryExpression(aggregateSubquery.GroupByAlias, aggregateSubquery.AggregateInGroupSelect, subquery); + return aggregateSubquery; + } + + protected virtual Expression VisitCollection(CollectionExpression collection) + { + return collection; + } + + protected virtual Expression VisitField(FieldExpression field) + { + var e = Visit(field.Expression); + if (field.Expression != e) + field = new FieldExpression(e, field.Alias, field.Name); + + return field; + } + + protected virtual Expression VisitProjection(ProjectionExpression projection) + { + var source = (SelectExpression)Visit(projection.Source); + var projector = Visit(projection.Projector); + if (source != projection.Source || projector != projection.Projector) + return new ProjectionExpression(source, projector, projection.Aggregator); + return projection; + } + + protected ReadOnlyCollection VisitOrderBy(ReadOnlyCollection orderBys) + { + if (orderBys != null) + { + List alternate = null; + for (int i = 0, n = orderBys.Count; i < n; i++) + { + OrderExpression expr = orderBys[i]; + Expression e = this.Visit(expr.Expression); + if (alternate == null && e != expr.Expression) + alternate = orderBys.Take(i).ToList(); + if (alternate != null) + alternate.Add(new OrderExpression(expr.OrderType, e)); + } + if (alternate != null) + return alternate.AsReadOnly(); + } + return orderBys; + } + + protected virtual Expression 
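Illustrative sketch, not part of the diff: why MongoExpressionType starts at 1000. Its values are cast straight into ExpressionType by MongoExpression's constructor, so they must not collide with the framework's own members; a visitor can then recognize custom nodes either by casting, as MongoExpressionVisitor.Visit does, or with a simple range check as sketched below.

using System.Linq.Expressions;

internal static class NodeTypeSample
{
    public static bool IsMongoNode(Expression expression)
    {
        // Framework node types are small integers; everything at or above 1000
        // is one of the driver's custom nodes (Collection, Field, Select, ...).
        return (int)expression.NodeType >= 1000;
    }
}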
VisitScalar(ScalarExpression scalar) + { + SelectExpression select = (SelectExpression)Visit(scalar.Select); + if (select != scalar.Select) + return new ScalarExpression(scalar.Type, select); + return scalar; + } + + protected virtual Expression VisitSelect(SelectExpression select) + { + var from = VisitSource(select.From); + var where = Visit(select.Where); + var groupBy = Visit(select.GroupBy); + var orderBy = VisitOrderBy(select.OrderBy); + var skip = Visit(select.Skip); + var take = Visit(select.Take); + var fields = VisitFieldDeclarationList(select.Fields); + if (from != select.From || where != select.Where || orderBy != select.OrderBy || groupBy != select.GroupBy || skip != select.Skip || take != select.Take || fields != select.Fields) + return new SelectExpression(select.Alias, fields, from, where, orderBy, groupBy, select.IsDistinct, skip, take); + return select; + } + + protected virtual Expression VisitSource(Expression source) + { + return Visit(source); + } + + protected virtual Expression VisitSubquery(SubqueryExpression subquery) + { + switch ((MongoExpressionType)subquery.NodeType) + { + case MongoExpressionType.Scalar: + return VisitScalar((ScalarExpression)subquery); + } + return subquery; + } + + protected virtual ReadOnlyCollection VisitFieldDeclarationList(ReadOnlyCollection fields) + { + if (fields == null) + return fields; + + List alternate = null; + for (int i = 0, n = fields.Count; i < n; i++) + { + var f = fields[i]; + var e = Visit(f.Expression); + if (f.Expression != e && alternate == null) + alternate = fields.Take(i).ToList(); + if (alternate != null) + alternate.Add(new FieldDeclaration(f.Name, e)); + } + if (alternate != null) + return alternate.AsReadOnly(); + return fields; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/OrderExpression.cs b/source/MongoDB/Linq/Expressions/OrderExpression.cs new file mode 100644 index 00000000..78541a97 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/OrderExpression.cs @@ -0,0 +1,22 @@ +using System; +using System.Linq.Expressions; + +namespace MongoDB.Linq.Expressions +{ + internal class OrderExpression : MongoExpression + { + public Expression Expression { get; private set; } + + public OrderType OrderType { get; private set; } + + public OrderExpression(OrderType orderType, Expression expression) + : base(MongoExpressionType.Order, expression.Type) + { + if (expression == null) + throw new ArgumentNullException("expression"); + + Expression = expression; + OrderType = orderType; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/OrderType.cs b/source/MongoDB/Linq/Expressions/OrderType.cs new file mode 100644 index 00000000..32e823a0 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/OrderType.cs @@ -0,0 +1,8 @@ +namespace MongoDB.Linq.Expressions +{ + internal enum OrderType + { + Ascending, + Descending + } +} diff --git a/source/MongoDB/Linq/Expressions/ProjectionExpression.cs b/source/MongoDB/Linq/Expressions/ProjectionExpression.cs new file mode 100644 index 00000000..e780f2fc --- /dev/null +++ b/source/MongoDB/Linq/Expressions/ProjectionExpression.cs @@ -0,0 +1,31 @@ +using System.Collections.Generic; +using System.Linq.Expressions; + +namespace MongoDB.Linq.Expressions +{ + internal class ProjectionExpression : MongoExpression + { + public LambdaExpression Aggregator { get; private set; } + + public bool IsSingleton + { + get { return Aggregator != null && Aggregator.Body.Type == Projector.Type; } + } + + public Expression Projector { get; 
private set; } + + public SelectExpression Source { get; private set; } + + public ProjectionExpression(SelectExpression source, Expression projector) + : this(source, projector, null) + { } + + public ProjectionExpression(SelectExpression source, Expression projector, LambdaExpression aggregator) + : base(MongoExpressionType.Projection, aggregator != null ? aggregator.Body.Type : typeof(IEnumerable<>).MakeGenericType(projector.Type)) + { + Source = source; + Projector = projector; + Aggregator = aggregator; + } + } +} diff --git a/source/MongoDB/Linq/Expressions/ScalarExpression.cs b/source/MongoDB/Linq/Expressions/ScalarExpression.cs new file mode 100644 index 00000000..78623934 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/ScalarExpression.cs @@ -0,0 +1,11 @@ +using System; + +namespace MongoDB.Linq.Expressions +{ + internal class ScalarExpression : SubqueryExpression + { + public ScalarExpression(Type type, SelectExpression select) + : base(MongoExpressionType.Scalar, type, select) + { } + } +} diff --git a/source/MongoDB/Linq/Expressions/SelectExpression.cs b/source/MongoDB/Linq/Expressions/SelectExpression.cs new file mode 100644 index 00000000..b8d64343 --- /dev/null +++ b/source/MongoDB/Linq/Expressions/SelectExpression.cs @@ -0,0 +1,83 @@ +using System.Collections.Generic; +using System.Linq.Expressions; +using System.Collections.ObjectModel; + +namespace MongoDB.Linq.Expressions +{ + internal class SelectExpression : AliasedExpression + { + private readonly bool _isDistinct; + private readonly ReadOnlyCollection _fields; + private readonly Expression _from; + private readonly Expression _groupBy; + private readonly Expression _take; + private readonly ReadOnlyCollection _orderBy; + private readonly Expression _skip; + private readonly Expression _where; + + public bool IsDistinct + { + get { return _isDistinct; } + } + + public ReadOnlyCollection Fields + { + get { return _fields; } + } + + public Expression From + { + get { return _from; } + } + + public Expression GroupBy + { + get { return _groupBy; } + } + + public Expression Take + { + get { return _take; } + } + + public ReadOnlyCollection OrderBy + { + get { return _orderBy; } + } + + public Expression Skip + { + get { return _skip; } + } + + public Expression Where + { + get { return _where; } + } + + public SelectExpression(Alias alias, IEnumerable fields, Expression from, Expression where) + : this(alias, fields, from, where, null, null) + { } + + public SelectExpression(Alias alias, IEnumerable fields, Expression from, Expression where, IEnumerable orderBy, Expression groupBy) + : this(alias, fields, from, where, orderBy, groupBy, false, null, null) + { } + + public SelectExpression(Alias alias, IEnumerable fields, Expression from, Expression where, IEnumerable orderBy, Expression groupBy, bool isDistinct, Expression skip, Expression take) + : base(MongoExpressionType.Select, typeof(void), alias) + { + _fields = fields as ReadOnlyCollection ?? 
new List(fields).AsReadOnly(); + + _orderBy = orderBy as ReadOnlyCollection; + if (_orderBy == null && orderBy != null) + _orderBy = new List(orderBy).AsReadOnly(); + + _isDistinct = isDistinct; + _from = from; + _groupBy = groupBy; + _take = take; + _where = where; + _skip = skip; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Expressions/SubqueryExpression.cs b/source/MongoDB/Linq/Expressions/SubqueryExpression.cs new file mode 100644 index 00000000..5aa86cfa --- /dev/null +++ b/source/MongoDB/Linq/Expressions/SubqueryExpression.cs @@ -0,0 +1,15 @@ +using System; + +namespace MongoDB.Linq.Expressions +{ + internal abstract class SubqueryExpression : MongoExpression + { + public SelectExpression Select { get; private set; } + + protected SubqueryExpression(MongoExpressionType nodeType, Type type, SelectExpression select) + : base(nodeType, type) + { + Select = select; + } + } +} diff --git a/source/MongoDB/Linq/Grouping.cs b/source/MongoDB/Linq/Grouping.cs new file mode 100644 index 00000000..39a16c0e --- /dev/null +++ b/source/MongoDB/Linq/Grouping.cs @@ -0,0 +1,62 @@ +using System.Collections; +using System.Collections.Generic; +using System.Linq; + +namespace MongoDB.Linq +{ + /// + /// + /// + /// The type of the key. + /// The type of the element. + internal class Grouping : IGrouping + { + private readonly TKey _key; + private readonly IEnumerable _group; + + /// + /// Initializes a new instance of the class. + /// + /// The key. + /// The group. + public Grouping(TKey key, IEnumerable group) + { + _key = key; + _group = group; + } + + /// + /// Gets the key of the . + /// + /// + /// + /// The key of the . + /// + public TKey Key + { + get { return _key; } + } + + /// + /// Returns an enumerator that iterates through the collection. + /// + /// + /// A that can be used to iterate through the collection. + /// + public IEnumerator GetEnumerator() + { + return _group.GetEnumerator(); + } + + /// + /// Returns an enumerator that iterates through a collection. + /// + /// + /// An object that can be used to iterate through the collection. + /// + IEnumerator IEnumerable.GetEnumerator() + { + return _group.GetEnumerator(); + } + } +} diff --git a/source/MongoDB/Linq/IMongoQueryable.cs b/source/MongoDB/Linq/IMongoQueryable.cs new file mode 100644 index 00000000..06a398d2 --- /dev/null +++ b/source/MongoDB/Linq/IMongoQueryable.cs @@ -0,0 +1,13 @@ +using System.Linq; + +namespace MongoDB.Linq +{ + internal interface IMongoQueryable : IQueryable + { + string CollectionName { get; } + + IMongoDatabase Database { get; } + + MongoQueryObject GetQueryObject(); + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/MongoQuery.cs b/source/MongoDB/Linq/MongoQuery.cs new file mode 100644 index 00000000..9a12c70a --- /dev/null +++ b/source/MongoDB/Linq/MongoQuery.cs @@ -0,0 +1,142 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; + +namespace MongoDB.Linq +{ + /// + /// + /// + /// + internal class MongoQuery : IOrderedQueryable, IMongoQueryable + { + private readonly Expression _expression; + private readonly MongoQueryProvider _provider; + + /// + /// Gets the expression tree that is associated with the instance of . + /// + /// + /// + /// The that is associated with this instance of . 
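Illustrative sketch, not part of the diff: SelectExpression is immutable, so the translators never modify a node in place; helpers such as SetWhere, SetFields and AddField in MongoExpressionExtensions copy the node with one slot replaced. The method below is hypothetical and assumes extraWhere is a boolean predicate expression.

using System.Linq.Expressions;
using MongoDB.Linq.Expressions;

internal static class SelectRewriteSample
{
    public static SelectExpression NarrowTo(SelectExpression select, Expression extraWhere)
    {
        var where = select.Where == null
            ? extraWhere
            : Expression.AndAlso(select.Where, extraWhere);

        // Returns a new SelectExpression; "select" itself is left untouched, so
        // other parts of the tree that still reference it are unaffected.
        return select.SetWhere(where);
    }
}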
+ /// + Expression IQueryable.Expression + { + get { return _expression; } + } + + /// + /// Gets the type of the element(s) that are returned when the expression tree associated with this instance of is executed. + /// + /// + /// + /// A that represents the type of the element(s) that are returned when the expression tree associated with this object is executed. + /// + Type IQueryable.ElementType + { + get { return typeof(T); } + } + + /// + /// Gets the name of the collection. + /// + /// The name of the collection. + string IMongoQueryable.CollectionName + { + get { return _provider.CollectionName; } + } + + /// + /// Gets the database. + /// + /// The database. + IMongoDatabase IMongoQueryable.Database + { + get { return _provider.Database; } + } + + /// + /// Gets the query provider that is associated with this data source. + /// + /// + /// + /// The that is associated with this data source. + /// + IQueryProvider IQueryable.Provider + { + get { return this._provider; } + } + + /// + /// Initializes a new instance of the class. + /// + /// The provider. + public MongoQuery(MongoQueryProvider provider) + { + if (provider == null) + throw new ArgumentNullException("provider"); + + this._expression = Expression.Constant(this); + this._provider = provider; + } + + /// + /// Initializes a new instance of the class. + /// + /// The provider. + /// The expression. + public MongoQuery(MongoQueryProvider provider, Expression expression) + { + if (provider == null) + throw new ArgumentNullException("provider"); + if (expression == null) + throw new ArgumentNullException("expression"); + + if (!typeof(IQueryable).IsAssignableFrom(expression.Type)) + throw new ArgumentOutOfRangeException("expression"); + this._provider = provider; + this._expression = expression; + } + + /// + /// Returns an enumerator that iterates through the collection. + /// + /// + /// A that can be used to iterate through the collection. + /// + public IEnumerator GetEnumerator() { + return ((IEnumerable)this._provider.Execute(_expression)).GetEnumerator(); + } + + /// + /// Returns an enumerator that iterates through a collection. + /// + /// + /// An object that can be used to iterate through the collection. + /// + IEnumerator IEnumerable.GetEnumerator() { + return ((IEnumerable)this._provider.Execute(_expression)).GetEnumerator(); + } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString() { + return _provider.GetQueryObject(_expression).ToString(); + } + + /// + /// Gets the query object. + /// + /// + MongoQueryObject IMongoQueryable.GetQueryObject() + { + return _provider.GetQueryObject(_expression); + } + } +} diff --git a/source/MongoDB/Linq/MongoQueryObject.cs b/source/MongoDB/Linq/MongoQueryObject.cs new file mode 100644 index 00000000..ccf3a37b --- /dev/null +++ b/source/MongoDB/Linq/MongoQueryObject.cs @@ -0,0 +1,150 @@ +using System; +using System.Linq.Expressions; + +namespace MongoDB.Linq +{ + internal class MongoQueryObject + { + /// + /// Gets or sets the aggregator. + /// + /// The aggregator. + public LambdaExpression Aggregator { get; set; } + + /// + /// Gets or sets the name of the collection. + /// + /// The name of the collection. + public string CollectionName { get; set; } + + /// + /// Gets or sets the database. + /// + /// The database. + public IMongoDatabase Database { get; set; } + + /// + /// Gets or sets the type of the document. + /// + /// The type of the document. 
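// Illustrative usage sketch: how MongoQuery<T> and the MongoQueryProvider defined further
// below compose at runtime. Both types are internal, so real callers reach them through the
// driver's public LINQ entry point; the Person class, the "people" collection name and the
// db parameter are assumptions made only for this example.
using System.Collections.Generic;
using System.Linq;

namespace MongoDB.Linq
{
    internal static class LinqUsageSketch
    {
        private class Person { public string Name { get; set; } public int Age { get; set; } }

        // Assumes db comes from an already-open Mongo connection.
        private static List<Person> FindAdults(IMongoDatabase db)
        {
            var provider = new MongoQueryProvider(db, "people");
            IQueryable<Person> people = new MongoQuery<Person>(provider);

            // Enumerating the query calls MongoQueryProvider.Execute, which translates the
            // expression tree and issues a find (or count/map-reduce) against the server.
            return people.Where(p => p.Age >= 21).ToList();
        }
    }
}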
+ public Type DocumentType { get; set; } + + /// + /// Gets or sets the fields. + /// + /// The fields. + public Document Fields { get; set; } + + /// + /// Gets or sets the finalizer function. + /// + /// The finalizer function. + public string FinalizerFunction { get; set; } + + /// + /// Gets or sets a value indicating whether this is a count query. + /// + /// true if this is a count query; otherwise, false. + public bool IsCount { get; set; } + + /// + /// Gets or sets a value indicating whether this instance is map reduce. + /// + /// + /// true if this instance is map reduce; otherwise, false. + /// + public bool IsMapReduce { get; set; } + + /// + /// Gets or sets the map function. + /// + /// The map function. + public string MapFunction { get; set; } + + /// + /// Gets or sets the reduce function. + /// + /// The reduce function. + public string ReduceFunction { get; set; } + + /// + /// Gets or sets the number to skip. + /// + /// The number to skip. + public int NumberToSkip { get; set; } + + /// + /// Gets or sets the number to limit. + /// + /// The number to limit. + public int NumberToLimit { get; set; } + + /// + /// Gets or sets the projector. + /// + /// The projector. + public LambdaExpression Projector { get; set; } + + /// + /// Gets or sets the query. + /// + /// The query. + public Document Query { get; private set; } + + /// + /// Gets the sort. + /// + /// The sort. + public Document Sort { get; private set; } + + /// + /// Initializes a new instance of the class. + /// + public MongoQueryObject() + { + Fields = new Document(); + Query = new Document(); + } + + /// + /// Adds the sort. + /// + /// The name. + /// The value. + public void AddSort(string name, int value) + { + if(Sort == null) + Sort = new Document(); + Sort.Add(name, value); + } + + /// + /// Sets the query document. + /// + /// The document. + public void SetQueryDocument(Document document) + { + Query = document; + } + + /// + /// Sets the where clause. + /// + /// The where clause. + public void SetWhereClause(string whereClause) + { + Query = Op.Where(whereClause); + } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString() + { + return "queryobject"; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/MongoQueryProvider.cs b/source/MongoDB/Linq/MongoQueryProvider.cs new file mode 100644 index 00000000..9a93cb00 --- /dev/null +++ b/source/MongoDB/Linq/MongoQueryProvider.cs @@ -0,0 +1,330 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using MongoDB.Commands; +using MongoDB.Linq.Expressions; +using MongoDB.Linq.Translators; +using MongoDB.Util; + +namespace MongoDB.Linq +{ + /// + /// + /// + internal class MongoQueryProvider : IQueryProvider + { + private readonly string _collectionName; + private readonly IMongoDatabase _database; + + /// + /// Gets the name of the collection. + /// + /// The name of the collection. + public string CollectionName + { + get { return _collectionName; } + } + + /// + /// Gets the database. + /// + /// The database. + public IMongoDatabase Database + { + get { return _database; } + } + + /// + /// Initializes a new instance of the class. + /// + /// The database. + /// Name of the collection. 
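// Illustrative sketch of the shape a translated MongoQueryObject ends up with for a query
// such as people.Where(p => p.Age >= 21).OrderBy(p => p.Name).Skip(10).Take(5). The
// collection and field names are assumptions; MongoQueryObjectBuilder (further below) is
// what actually populates the object.
var ageFilter = new Document();
ageFilter["$gte"] = 21;
var query = new Document();
query["Age"] = ageFilter;

var queryObject = new MongoQueryObject { CollectionName = "people", NumberToSkip = 10, NumberToLimit = 5 };
queryObject.SetQueryDocument(query);
queryObject.AddSort("Name", 1);   // 1 = ascending, -1 = descending
queryObject.Fields["Name"] = 1;   // populated only when the projection is narrower than the whole document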
+ public MongoQueryProvider(IMongoDatabase database, string collectionName) + { + if (database == null) + throw new ArgumentNullException("database"); + if (collectionName == null) + throw new ArgumentNullException("collectionName"); + + _collectionName = collectionName; + _database = database; + } + + /// + /// Creates the query. + /// + /// The type of the element. + /// The expression. + /// + public IQueryable CreateQuery(Expression expression) + { + return new MongoQuery(this, expression); + } + + /// + /// Constructs an object that can evaluate the query represented by a specified expression tree. + /// + /// An expression tree that represents a LINQ query. + /// + /// An that can evaluate the query represented by the specified expression tree. + /// + public IQueryable CreateQuery(Expression expression) + { + Type elementType = TypeHelper.GetElementType(expression.Type); + try + { + return (IQueryable)Activator.CreateInstance(typeof(MongoQuery<>).MakeGenericType(elementType), new object[] { this, expression }); + } + catch (TargetInvocationException ex) + { + throw ex.InnerException; + } + } + + /// + /// Executes the specified expression. + /// + /// The type of the result. + /// The expression. + /// + public TResult Execute(Expression expression) + { + object result = Execute(expression); + return (TResult)result; + } + + /// + /// Executes the query represented by a specified expression tree. + /// + /// An expression tree that represents a LINQ query. + /// + /// The value that results from executing the specified query. + /// + public object Execute(Expression expression) + { + var plan = BuildExecutionPlan(expression); + + var lambda = expression as LambdaExpression; + if (lambda != null) + { + var fn = Expression.Lambda(lambda.Type, plan, lambda.Parameters); + return fn.Compile(); + } + else + { + var efn = Expression.Lambda>(Expression.Convert(plan, typeof(object))); + var fn = efn.Compile(); + return fn(); + } + } + + /// + /// Gets the query object. + /// + /// The expression. + /// + internal MongoQueryObject GetQueryObject(Expression expression) + { + var projection = Translate(expression); + return new MongoQueryObjectBuilder().Build(projection); + } + + /// + /// Executes the query object. + /// + /// The query object. 
+ /// + internal object ExecuteQueryObject(MongoQueryObject queryObject){ + if (queryObject.IsCount) + return ExecuteCount(queryObject); + if (queryObject.IsMapReduce) + return ExecuteMapReduce(queryObject); + return ExecuteFind(queryObject); + } + + private Expression BuildExecutionPlan(Expression expression) + { + var lambda = expression as LambdaExpression; + if (lambda != null) + expression = lambda.Body; + + var projection = Translate(expression); + + var rootQueryable = new RootQueryableFinder().Find(expression); + var provider = Expression.Convert( + Expression.Property(rootQueryable, typeof(IQueryable).GetProperty("Provider")), + typeof(MongoQueryProvider)); + + return new ExecutionBuilder().Build(projection, provider); + } + + private Expression Translate(Expression expression) + { + var rootQueryable = new RootQueryableFinder().Find(expression); + var elementType = ((IQueryable)((ConstantExpression)rootQueryable).Value).ElementType; + + expression = PartialEvaluator.Evaluate(expression, CanBeEvaluatedLocally); + + expression = new FieldBinder().Bind(expression, elementType); + expression = new QueryBinder(this, expression).Bind(expression); + expression = new AggregateRewriter().Rewrite(expression); + expression = new RedundantFieldRemover().Remove(expression); + expression = new RedundantSubqueryRemover().Remove(expression); + + expression = new OrderByRewriter().Rewrite(expression); + expression = new RedundantFieldRemover().Remove(expression); + expression = new RedundantSubqueryRemover().Remove(expression); + + return expression; + } + + /// + /// Determines whether this instance [can be evaluated locally] the specified expression. + /// + /// The expression. + /// + /// true if this instance [can be evaluated locally] the specified expression; otherwise, false. + /// + private bool CanBeEvaluatedLocally(Expression expression) + { + // any operation on a query can't be done locally + ConstantExpression cex = expression as ConstantExpression; + if (cex != null) + { + IQueryable query = cex.Value as IQueryable; + if (query != null && query.Provider == this) + return false; + } + MethodCallExpression mc = expression as MethodCallExpression; + if (mc != null && (mc.Method.DeclaringType == typeof(Enumerable) || mc.Method.DeclaringType == typeof(Queryable) || mc.Method.DeclaringType == typeof(MongoQueryable))) + { + return false; + } + if (expression.NodeType == ExpressionType.Convert && + expression.Type == typeof(object)) + return true; + return expression.NodeType != ExpressionType.Parameter && + expression.NodeType != ExpressionType.Lambda; + } + + /// + /// Executes the count. + /// + /// The query object. 
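// A rough sketch of the partial-evaluation step in Translate above: PartialEvaluator, guided
// by CanBeEvaluatedLocally, folds locally evaluable subtrees (closed-over variables and any
// arithmetic on them) into constants before binding, while the queryable itself and
// Queryable/Enumerable/MongoQueryable calls are left for the translator. The variable names
// here are assumptions made only for this example.
//
//   int minimumAge = 18;
//   var adults = people.Where(p => p.Age > minimumAge + 3);
//
//   // before evaluation: p.Age > (closure.minimumAge + 3)
//   // after evaluation:  p.Age > 21   -- the right side is now a ConstantExpression,
//   //                                    which DocumentFormatter can later turn into
//   //                                    {"Age": {"$gt": 21}}.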
+ /// + private object ExecuteCount(MongoQueryObject queryObject) + { + var miGetCollection = typeof(IMongoDatabase).GetMethods().Where(m => m.Name == "GetCollection" && m.GetGenericArguments().Length == 1 && m.GetParameters().Length == 1).Single().MakeGenericMethod(queryObject.DocumentType); + var collection = miGetCollection.Invoke(queryObject.Database, new[] { queryObject.CollectionName }); + + if (queryObject.Query == null) + return Convert.ToInt32(collection.GetType().GetMethod("Count", Type.EmptyTypes).Invoke(collection, null)); + + return Convert.ToInt32(collection.GetType().GetMethod("Count", new[] { typeof(object) }).Invoke(collection, new[] { queryObject.Query })); + } + + private object ExecuteFind(MongoQueryObject queryObject) + { + var miGetCollection = typeof(IMongoDatabase).GetMethods().Where(m => m.Name == "GetCollection" && m.GetGenericArguments().Length == 1 && m.GetParameters().Length == 1).Single().MakeGenericMethod(queryObject.DocumentType); + var collection = miGetCollection.Invoke(queryObject.Database, new[] { queryObject.CollectionName }); + + var cursor = collection.GetType().GetMethod("FindAll") + .Invoke(collection, null); + var cursorType = cursor.GetType(); + Document spec; + if (queryObject.Sort != null) + { + spec = new Document + { + {"query", queryObject.Query}, + {"orderby", queryObject.Sort} + }; + } + else + spec = queryObject.Query; + + cursorType.GetMethod("Spec", new[] { typeof(Document) }).Invoke(cursor, new object[] { spec }); + if(queryObject.Fields.Count > 0) + cursorType.GetMethod("Fields", new[] { typeof(Document) }).Invoke(cursor, new object[] { queryObject.Fields }); + cursorType.GetMethod("Limit").Invoke(cursor, new object[] { queryObject.NumberToLimit }); + cursorType.GetMethod("Skip").Invoke(cursor, new object[] { queryObject.NumberToSkip }); + + var executor = GetExecutor(queryObject.DocumentType, queryObject.Projector, queryObject.Aggregator, true); + return executor.Compile().DynamicInvoke(cursor.GetType().GetProperty("Documents").GetValue(cursor, null)); + } + + private object ExecuteMapReduce(MongoQueryObject queryObject) + { + var miGetCollection = typeof(IMongoDatabase).GetMethods().Where(m => m.Name == "GetCollection" && m.GetGenericArguments().Length == 1 && m.GetParameters().Length == 1).Single().MakeGenericMethod(queryObject.DocumentType); + var collection = miGetCollection.Invoke(queryObject.Database, new[] { queryObject.CollectionName }); + + var mapReduce = collection.GetType().GetMethod("MapReduce").Invoke(collection, null); + + var mapReduceCommand = (MapReduceCommand)mapReduce.GetType().GetProperty("Command").GetValue(mapReduce, null); + mapReduceCommand.Map = new Code(queryObject.MapFunction); + mapReduceCommand.Reduce = new Code(queryObject.ReduceFunction); + mapReduceCommand.Finalize = new Code(queryObject.FinalizerFunction); + mapReduceCommand.Query = queryObject.Query; + + if(queryObject.Sort != null) + mapReduceCommand.Sort = queryObject.Sort; + + mapReduceCommand.Limit = queryObject.NumberToLimit; + + if (queryObject.NumberToSkip != 0) + throw new InvalidQueryException("MapReduce queries do no support Skips."); + + var executor = GetExecutor(typeof(Document), queryObject.Projector, queryObject.Aggregator, true); + return executor.Compile().DynamicInvoke(mapReduce.GetType().GetProperty("Documents").GetValue(mapReduce, null)); + } + + private static LambdaExpression GetExecutor(Type documentType, LambdaExpression projector, + Expression aggregator, bool boxReturn) + { + var documents = 
Expression.Parameter(typeof(IEnumerable<>).MakeGenericType(documentType), "documents"); + Expression body = Expression.Call( + typeof(MongoQueryProvider), + "Project", + new[] { documentType, projector.Body.Type }, + documents, + projector); + if (aggregator != null) + body = Expression.Invoke(aggregator, body); + + if (boxReturn && body.Type != typeof(object)) + body = Expression.Convert(body, typeof(object)); + + return Expression.Lambda(body, documents); + } + + private static IEnumerable Project(IEnumerable documents, Func projector) + { + return documents.Select(projector); + } + + private class RootQueryableFinder : MongoExpressionVisitor + { + private Expression _root; + + public Expression Find(Expression expression) + { + Visit(expression); + return _root; + } + + protected override Expression Visit(Expression exp) + { + Expression result = base.Visit(exp); + + if (this._root == null && result != null && typeof(IQueryable).IsAssignableFrom(result.Type)) + { + this._root = result; + } + + return result; + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/MongoQueryable.cs b/source/MongoDB/Linq/MongoQueryable.cs new file mode 100644 index 00000000..1e99bb68 --- /dev/null +++ b/source/MongoDB/Linq/MongoQueryable.cs @@ -0,0 +1,20 @@ +namespace MongoDB.Linq +{ + /// + /// + /// + internal static class MongoQueryable + { + /// + /// Keys the specified document. + /// + /// + /// The document. + /// The key. + /// + public static DocumentQuery Key(this T document, string key) where T : Document + { + return new DocumentQuery(document, key); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/AggregateChecker.cs b/source/MongoDB/Linq/Translators/AggregateChecker.cs new file mode 100644 index 00000000..af87f296 --- /dev/null +++ b/source/MongoDB/Linq/Translators/AggregateChecker.cs @@ -0,0 +1,36 @@ +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class AggregateChecker : MongoExpressionVisitor + { + private bool _hasAggregate; + + public bool HasAggregates(Expression expression) + { + _hasAggregate = false; + Visit(expression); + return _hasAggregate; + } + + protected override Expression VisitAggregate(AggregateExpression aggregate) + { + _hasAggregate = true; + return aggregate; + } + + protected override Expression VisitSelect(SelectExpression select) + { + Visit(select.Where); + VisitOrderBy(select.OrderBy); + VisitFieldDeclarationList(select.Fields); + return select; + } + + protected override Expression VisitSubquery(SubqueryExpression subquery) + { + return subquery; + } + } +} diff --git a/source/MongoDB/Linq/Translators/AggregateRewriter.cs b/source/MongoDB/Linq/Translators/AggregateRewriter.cs new file mode 100644 index 00000000..ecc29713 --- /dev/null +++ b/source/MongoDB/Linq/Translators/AggregateRewriter.cs @@ -0,0 +1,75 @@ +using System.Collections.Generic; +using System.Linq; +using MongoDB.Linq.Expressions; +using System.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class AggregateRewriter : MongoExpressionVisitor + { + ILookup _lookup; + readonly Dictionary _map; + + public AggregateRewriter() + { + _map = new Dictionary(); + } + + public Expression Rewrite(Expression expression) + { + _lookup = new AggregateGatherer().Gather(expression).ToLookup(x => x.GroupByAlias); + return Visit(expression); + } + + protected override Expression VisitAggregateSubquery(AggregateSubqueryExpression aggregate) + { + Expression mapped; + if 
(_map.TryGetValue(aggregate, out mapped)) + return mapped; + + return Visit(aggregate.AggregateAsSubquery); + } + + protected override Expression VisitSelect(SelectExpression select) + { + select = (SelectExpression)base.VisitSelect(select); + if (_lookup.Contains(select.Alias)) + { + var fields = new List(select.Fields); + foreach (var ae in _lookup[select.Alias]) + { + var name = "_$agg" + fields.Count; + var field = new FieldDeclaration(name, ae.AggregateInGroupSelect); + if (_map.ContainsKey(ae)) + continue; + _map.Add(ae, new FieldExpression(ae.AggregateInGroupSelect, ae.GroupByAlias, name)); + fields.Add(field); + } + return new SelectExpression(select.Alias, fields, select.From, select.Where, select.OrderBy, select.GroupBy, select.IsDistinct, select.Skip, select.Take); + } + return select; + } + + private class AggregateGatherer : MongoExpressionVisitor + { + private readonly List _aggregates; + + public AggregateGatherer() + { + _aggregates = new List(); + } + + public IEnumerable Gather(Expression expression) + { + Visit(expression); + return _aggregates; + } + + protected override Expression VisitAggregateSubquery(AggregateSubqueryExpression aggregate) + { + _aggregates.Add(aggregate); + return base.VisitAggregateSubquery(aggregate); + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/DeclaredAliasGatherer.cs b/source/MongoDB/Linq/Translators/DeclaredAliasGatherer.cs new file mode 100644 index 00000000..7730840d --- /dev/null +++ b/source/MongoDB/Linq/Translators/DeclaredAliasGatherer.cs @@ -0,0 +1,30 @@ +using System.Collections.Generic; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class DeclaredAliasGatherer : MongoExpressionVisitor + { + private HashSet _aliases; + + public HashSet Gather(Expression source) + { + _aliases = new HashSet(); + Visit(source); + return _aliases; + } + + protected override Expression VisitSelect(SelectExpression select) + { + _aliases.Add(select.Alias); + return select; + } + + protected override Expression VisitCollection(CollectionExpression collection) + { + _aliases.Add(collection.Alias); + return collection; + } + } +} diff --git a/source/MongoDB/Linq/Translators/DocumentFormatter.cs b/source/MongoDB/Linq/Translators/DocumentFormatter.cs new file mode 100644 index 00000000..7270ffdc --- /dev/null +++ b/source/MongoDB/Linq/Translators/DocumentFormatter.cs @@ -0,0 +1,372 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; +using System.Text.RegularExpressions; + +using MongoDB.Linq.Expressions; +using MongoDB.Util; + +namespace MongoDB.Linq.Translators +{ + internal class DocumentFormatter : MongoExpressionVisitor + { + private Document _query; + private Stack _scopes; + private bool _hasPredicate; + + internal Document FormatDocument(Expression expression) + { + _query = new Document(); + _scopes = new Stack(); + Visit(expression); + return _query; + } + + protected override Expression VisitBinary(BinaryExpression b) + { + int scopeDepth = _scopes.Count; + bool hasPredicate = b.NodeType != ExpressionType.And && b.NodeType != ExpressionType.AndAlso && b.NodeType != ExpressionType.Or && b.NodeType != ExpressionType.OrElse; + VisitPredicate(b.Left, hasPredicate); + + switch (b.NodeType) + { + case ExpressionType.Equal: + break; + case ExpressionType.GreaterThan: + PushConditionScope("$gt"); + break; + case ExpressionType.GreaterThanOrEqual: + 
PushConditionScope("$gte"); + break; + case ExpressionType.LessThan: + PushConditionScope("$lt"); + break; + case ExpressionType.LessThanOrEqual: + PushConditionScope("$lte"); + break; + case ExpressionType.NotEqual: + PushConditionScope("$ne"); + break; + case ExpressionType.Modulo: + throw new NotImplementedException(); + case ExpressionType.And: + case ExpressionType.AndAlso: + break; + default: + throw new NotSupportedException(string.Format("The operation {0} is not supported.", b.NodeType)); + } + + VisitPredicate(b.Right, false); + + while (_scopes.Count > scopeDepth) + PopConditionScope(); + + return b; + } + + protected override Expression VisitConstant(ConstantExpression c) + { + AddCondition(c.Value); + return c; + } + + protected override Expression VisitField(FieldExpression f) + { + if (!_hasPredicate) + { + PushConditionScope(f.Name); + AddCondition(true); + PopConditionScope(); + } + else + PushConditionScope(f.Name); + return f; + } + + protected override Expression VisitMemberAccess(MemberExpression m) + { + if (m.Member.DeclaringType == typeof(Array)) + { + if (m.Member.Name == "Length") + { + VisitPredicate(m.Expression, true); + PushConditionScope("$size"); + return m; + } + } + else if (typeof(ICollection).IsAssignableFrom(m.Member.DeclaringType)) + { + if (m.Member.Name == "Count") + { + VisitPredicate(m.Expression, true); + PushConditionScope("$size"); + return m; + } + } + else if (typeof(ICollection<>).IsOpenTypeAssignableFrom(m.Member.DeclaringType)) + { + if (m.Member.Name == "Count") + { + VisitPredicate(m.Expression, true); + PushConditionScope("$size"); + return m; + } + } + + throw new NotSupportedException(string.Format("The member {0} is not supported.", m.Member.Name)); + } + + protected override Expression VisitMethodCall(MethodCallExpression m) + { + FieldExpression field; + if (m.Method.DeclaringType == typeof(Queryable) || m.Method.DeclaringType == typeof(Enumerable)) + { + switch (m.Method.Name) + { + case "Any": + if(m.Arguments.Count != 2) + throw new NotSupportedException("Only the Any method with 2 arguments is supported."); + + field = m.Arguments[0] as FieldExpression; + if (field == null) + throw new InvalidQueryException("A mongo field must be a part of the Contains method."); + VisitPredicate(field, true); + PushConditionScope("$elemMatch"); + VisitPredicate(m.Arguments[1], true); + PopConditionScope(); //elemMatch + PopConditionScope(); //field + return m; + + case "Contains": + if (m.Arguments.Count != 2) + throw new NotSupportedException("Only the Contains method with 2 arguments is supported."); + + field = m.Arguments[0] as FieldExpression; + if (field != null) + { + VisitPredicate(field, true); + AddCondition(EvaluateConstant(m.Arguments[1])); + PopConditionScope(); + return m; + } + + field = m.Arguments[1] as FieldExpression; + if (field == null) + throw new InvalidQueryException("A mongo field must be a part of the Contains method."); + VisitPredicate(field, true); + AddCondition("$in", EvaluateConstant(m.Arguments[0])); + PopConditionScope(); + return m; + case "Count": + if (m.Arguments.Count == 1) + { + Visit(m.Arguments[0]); + PushConditionScope("$size"); + return m; + } + throw new NotSupportedException("The method Count with a predicate is not supported for field."); + } + } + else if(typeof(ICollection<>).IsOpenTypeAssignableFrom(m.Method.DeclaringType) || typeof(IList).IsAssignableFrom(m.Method.DeclaringType)) + { + switch(m.Method.Name) + { + case "Contains": + field = m.Arguments[0] as FieldExpression; + if (field == null) 
+ throw new InvalidQueryException(string.Format("The mongo field must be the argument in method {0}.", m.Method.Name)); + VisitPredicate(field, true); + AddCondition("$in", EvaluateConstant(m.Object).OfType().ToArray()); + PopConditionScope(); + return m; + } + } + else if (m.Method.DeclaringType == typeof(string)) + { + field = m.Object as FieldExpression; + if (field == null) + throw new InvalidQueryException(string.Format("The mongo field must be the operator for a string operation of type {0}.", m.Method.Name)); + VisitPredicate(field, true); + + var value = EvaluateConstant(m.Arguments[0]); + + switch(m.Method.Name) + { + case "StartsWith": + AddCondition(new MongoRegex(string.Format("^{0}", value))); + break; + case "EndsWith": + AddCondition(new MongoRegex(string.Format("{0}$", value))); + break; + case "Contains": + AddCondition(new MongoRegex(string.Format("{0}", value))); + break; + default: + throw new NotSupportedException(string.Format("The string method {0} is not supported.", m.Method.Name)); + } + + PopConditionScope(); + return m; + } + else if (m.Method.DeclaringType == typeof(Regex)) + { + if (m.Method.Name == "IsMatch") + { + field = m.Arguments[0] as FieldExpression; + if (field == null) + throw new InvalidQueryException(string.Format("The mongo field must be the operator for a string operation of type {0}.", m.Method.Name)); + + VisitPredicate(field, true); + string value; + if (m.Object == null) + value = EvaluateConstant(m.Arguments[1]); + else + throw new InvalidQueryException(string.Format("Only the static Regex.IsMatch is supported.", m.Method.Name)); + + var regexOptions = RegexOptions.None; + if (m.Arguments.Count > 2) + regexOptions = EvaluateConstant(m.Arguments[2]); + + AddCondition(new MongoRegex(value, regexOptions)); + PopConditionScope(); + return m; + } + } + + throw new NotSupportedException(string.Format("The method {0} is not supported.", m.Method.Name)); + } + + protected override Expression VisitUnary(UnaryExpression u) + { + switch (u.NodeType) + { + case ExpressionType.Not: + PushConditionScope("$not"); + VisitPredicate(u.Operand, false); + PopConditionScope(); + break; + case ExpressionType.ArrayLength: + Visit(u.Operand); + PushConditionScope("$size"); + break; + case ExpressionType.Convert: + case ExpressionType.ConvertChecked: + Visit(u.Operand); + break; + default: + throw new NotSupportedException(string.Format("The unary operator {0} is not supported.", u.NodeType)); + } + + return u; + } + + private void AddCondition(object value) + { + _scopes.Peek().AddCondition(value ?? 
NullPlaceHolder.Instance); + } + + private void AddCondition(string name, object value) + { + PushConditionScope(name); + AddCondition(value); + PopConditionScope(); + } + + private void PushConditionScope(string name) + { + if (_scopes.Count == 0) + _scopes.Push(new Scope(name, _query[name])); + else + _scopes.Push(_scopes.Peek().CreateChildScope(name)); + } + + private void PopConditionScope() + { + var scope = _scopes.Pop(); + if (scope.Value == null) + return; + + var doc = _query; + foreach (var s in _scopes.Reverse()) //as if it were a queue + { + var sub = doc[s.Key]; + if (sub == null) + doc[s.Key] = sub = new Document(); + else if (!(sub is Document)) + throw new InvalidQueryException(); + + doc = (Document)sub; + } + + if (scope.Value is NullPlaceHolder) + doc[scope.Key] = null; + else + doc[scope.Key] = scope.Value; + } + + private void VisitPredicate(Expression expression, bool hasPredicate) + { + var oldHasPredicate = _hasPredicate; + _hasPredicate = hasPredicate; + Visit(expression); + _hasPredicate = oldHasPredicate; + } + + private static T EvaluateConstant(Expression e) + { + if (e.NodeType != ExpressionType.Constant) + throw new ArgumentException("Expression must be a constant."); + + return (T)((ConstantExpression)e).Value; + } + + private static bool IsBoolean(Expression expression) + { + return expression.Type == typeof(bool) || expression.Type == typeof(bool?); + } + + private class NullPlaceHolder + { + public static readonly NullPlaceHolder Instance = new NullPlaceHolder(); + + private NullPlaceHolder() + { } + } + + private class Scope + { + public string Key { get; private set; } + + public object Value { get; private set; } + + public Scope(string key, object initialValue) + { + Key = key; + Value = initialValue; + } + + public void AddCondition(object value) + { + if (Value is Document) + { + if (!(value is Document)) + throw new InvalidQueryException(); + + ((Document)Value).Merge((Document)value); + } + else + Value = value; + } + + public Scope CreateChildScope(string name) + { + if (Value is Document) + return new Scope(name, ((Document)Value)[name]); + + return new Scope(name, null); + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/ExpressionReplacer.cs b/source/MongoDB/Linq/Translators/ExpressionReplacer.cs new file mode 100644 index 00000000..da803d66 --- /dev/null +++ b/source/MongoDB/Linq/Translators/ExpressionReplacer.cs @@ -0,0 +1,30 @@ +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class ExpressionReplacer : MongoExpressionVisitor + { + private Expression _replaceWith; + private Expression _searchFor; + + public Expression Replace(Expression expression, Expression searchFor, Expression replaceWith) + { + _searchFor = searchFor; + _replaceWith = replaceWith; + return Visit(expression); + } + + public Expression ReplaceAll(Expression expression, Expression[] searchFor, Expression[] replaceWith) + { + for(var i = 0; i < searchFor.Length; i++) + expression = Replace(expression, searchFor[i], replaceWith[i]); + return expression; + } + + protected override Expression Visit(Expression exp) + { + return exp == _searchFor ? 
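// Illustrative sketch of the translations the DocumentFormatter above produces once fields
// have been bound; the field, value and collection names are assumptions for the example:
//
//   p.Age == 21                        ->  {"Age": 21}
//   p.Age >= 21 && p.Age < 65          ->  {"Age": {"$gte": 21, "$lt": 65}}
//   p.Name.StartsWith("B")             ->  {"Name": /^B/}              (MongoRegex)
//   names.Contains(p.Name)             ->  {"Name": {"$in": [...]}}    (constant list)
//   p.Tags.Count == 3                  ->  {"Tags": {"$size": 3}}
//   p.Addresses.Any(a => a.Zip == 1)   ->  {"Addresses": {"$elemMatch": {"Zip": 1}}}
//
// Anything the formatter cannot express as a query document throws, and the query object
// builder (further below) then falls back to a javascript $where clause instead.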
_replaceWith : base.Visit(exp); + } + } +} diff --git a/source/MongoDB/Linq/Translators/FieldBinder.cs b/source/MongoDB/Linq/Translators/FieldBinder.cs new file mode 100644 index 00000000..0cf9c744 --- /dev/null +++ b/source/MongoDB/Linq/Translators/FieldBinder.cs @@ -0,0 +1,191 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; +using MongoDB.Util; + +namespace MongoDB.Linq.Translators +{ + internal class FieldBinder : ExpressionVisitor + { + private static readonly HashSet CollectionTypes = new HashSet + { + typeof(ICollection), typeof(ICollection<>) + }; + + private Alias _alias; + private FieldFinder _finder; + private Type _elementType; + + public Expression Bind(Expression expression, Type elementType) + { + _alias = new Alias(); + _finder = new FieldFinder(); + _elementType = elementType; + return Visit(expression); + } + + protected override Expression Visit(Expression exp) + { + if (exp == null) + return exp; + + var fieldName = _finder.Find(exp); + if (fieldName != null) + return new FieldExpression(exp, _alias, fieldName); + + return base.Visit(exp); + } + + protected override Expression VisitParameter(ParameterExpression p) + { + if(p.Type == _elementType) + return new FieldExpression(p, _alias, "*"); + + return base.VisitParameter(p); + } + + private class FieldFinder : ExpressionVisitor + { + private Stack _fieldParts; + private bool _isBlocked; + + public string Find(Expression expression) + { + if (expression.NodeType == ExpressionType.Parameter) + return null; + + _fieldParts = new Stack(); + _isBlocked = false; + Visit(expression); + var fieldName = string.Join(".", _fieldParts.ToArray()); + if (_isBlocked) + fieldName = null; + + return fieldName; + } + + protected override Expression Visit(Expression exp) + { + if (exp == null) + return null; + + switch (exp.NodeType) + { + case ExpressionType.ArrayIndex: + case ExpressionType.Call: + case ExpressionType.MemberAccess: + case ExpressionType.Parameter: + return base.Visit(exp); + default: + _isBlocked = true; + return exp; + } + } + + protected override Expression VisitBinary(BinaryExpression b) + { + //this is an ArrayIndex Node + _fieldParts.Push(((int)((ConstantExpression)b.Right).Value).ToString()); + Visit(b.Left); + return b; + } + + protected override Expression VisitMemberAccess(MemberExpression m) + { + var declaringType = m.Member.DeclaringType; + if (!IsNativeToMongo(declaringType) && !IsCollection(declaringType)) + { + _fieldParts.Push(m.Member.Name); + Visit(m.Expression); + return m; + } + + _isBlocked = true; + return m; + } + + protected override Expression VisitMethodCall(MethodCallExpression m) + { + if (m.Method.DeclaringType == typeof(Queryable) || m.Method.DeclaringType == typeof(Enumerable)) + { + if (m.Method.Name == "ElementAt" || m.Method.Name == "ElementAtOrDefault") + { + _fieldParts.Push(((int)((ConstantExpression)m.Arguments[1]).Value).ToString()); + Visit(m.Arguments[0]); + return m; + } + } + else if (m.Method.DeclaringType == typeof(MongoQueryable)) + { + if (m.Method.Name == "Key") + { + _fieldParts.Push((string)((ConstantExpression)m.Arguments[1]).Value); + Visit(m.Arguments[0]); + return m; + } + } + else if (typeof(Document).IsAssignableFrom(m.Method.DeclaringType)) + { + if (m.Method.Name == "get_Item") //TODO: does this work for VB? 
+ { + _fieldParts.Push((string)((ConstantExpression)m.Arguments[0]).Value); + Visit(m.Object); + return m; + } + } + else if (typeof(IList<>).IsOpenTypeAssignableFrom(m.Method.DeclaringType) || typeof(IList).IsAssignableFrom(m.Method.DeclaringType)) + { + if (m.Method.Name == "get_Item") + { + _fieldParts.Push(((int)((ConstantExpression)m.Arguments[0]).Value).ToString()); + Visit(m.Object); + return m; + } + } + + _isBlocked = true; + return m; + } + + //protected override Expression VisitParameter(ParameterExpression p) + //{ + // if (p.Type.IsGenericType && p.Type.GetGenericTypeDefinition() == typeof(IGrouping<,>)) + // _isBlocked = true; + // return base.VisitParameter(p); + //} + + private static bool IsCollection(Type type) + { + //HACK: this is going to generally subvert custom objects that implement ICollection or ICollection, + //but are not collections + if (type.IsGenericType) + type = type.GetGenericTypeDefinition(); + + return CollectionTypes.Any(x => x.IsAssignableFrom(type)); + } + + private static bool IsNativeToMongo(Type type) + { + //TODO: this code exists here and in BsonClassMapDescriptor. Should probably be centralized... + var typeCode = Type.GetTypeCode(type); + + if (typeCode != TypeCode.Object) + return true; + + if (type == typeof(Guid)) + return true; + + if (type == typeof(Oid)) + return true; + + if (type == typeof(byte[])) + return true; + + return false; + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/FieldGatherer.cs b/source/MongoDB/Linq/Translators/FieldGatherer.cs new file mode 100644 index 00000000..092a98cc --- /dev/null +++ b/source/MongoDB/Linq/Translators/FieldGatherer.cs @@ -0,0 +1,36 @@ +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class FieldGatherer : MongoExpressionVisitor + { + private List _fields; + + public ReadOnlyCollection Gather(Expression exp) + { + _fields = new List(); + Visit(exp); + return _fields.AsReadOnly(); + } + + protected override Expression VisitSelect(SelectExpression select) + { + VisitFieldDeclarationList(select.Fields); + return select; + } + + protected override Expression VisitField(FieldExpression field) + { + var fields = new FieldGatherer().Gather(field.Expression); + if (fields.Count == 0) + _fields.Add(field); + else + _fields.AddRange(fields); + + return base.VisitField(field); + } + } +} diff --git a/source/MongoDB/Linq/Translators/FieldMapper.cs b/source/MongoDB/Linq/Translators/FieldMapper.cs new file mode 100644 index 00000000..a3d616e5 --- /dev/null +++ b/source/MongoDB/Linq/Translators/FieldMapper.cs @@ -0,0 +1,31 @@ +using System.Collections.Generic; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class FieldMapper : MongoExpressionVisitor + { + private HashSet _oldAliases; + private Alias _newAlias; + + public Expression Map(Expression expression, Alias newAlias, IEnumerable oldAliases) + { + _oldAliases = new HashSet(oldAliases); + _newAlias = newAlias; + return Visit(expression); + } + + public Expression Map(Expression expression, Alias newAlias, params Alias[] oldAliases) + { + return Map(expression, newAlias, (IEnumerable)oldAliases); + } + + protected override Expression VisitField(FieldExpression field) + { + if (_oldAliases.Contains(field.Alias)) + return new FieldExpression(field.Expression, _newAlias, field.Name); + return field; + } + } +} \ No 
newline at end of file diff --git a/source/MongoDB/Linq/Translators/FieldProjector.cs b/source/MongoDB/Linq/Translators/FieldProjector.cs new file mode 100644 index 00000000..060a8cab --- /dev/null +++ b/source/MongoDB/Linq/Translators/FieldProjector.cs @@ -0,0 +1,176 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class FieldProjector : MongoExpressionVisitor + { + private HashSet _candidates; + private Alias[] _existingAliases; + private HashSet _fieldNames; + private List _fields; + private Dictionary _map; + private Alias _newAlias; + private readonly Nominator _nominator; + private int columnIndex; + + public FieldProjector(Func canBeField) + { + _nominator = new Nominator(canBeField); + } + + public FieldProjection ProjectFields(Expression expression, Alias newAlias, params Alias[] existingAliases) + { + _newAlias = newAlias; + _existingAliases = existingAliases; + _fields = new List(); + _fieldNames = new HashSet(); + _candidates = _nominator.Nominate(expression); + _map = new Dictionary(); + return new FieldProjection(_fields.AsReadOnly(), Visit(expression)); + } + + protected override Expression Visit(Expression exp) + { + if (_candidates.Contains(exp)) + { + if (exp.NodeType == (ExpressionType)MongoExpressionType.Field) + { + var field = (FieldExpression)exp; + FieldExpression mapped; + if (_map.TryGetValue(field, out mapped)) + return mapped; + + Alias alias = _existingAliases.Contains(field.Alias) ? field.Alias : _newAlias; + var fieldName = GetUniqueFieldName(field.Name); + _fields.Add(new FieldDeclaration(fieldName, field)); + return new FieldExpression(exp, alias, field.Name); + } + else + { + var fieldName = GetNextFieldName(); + _fields.Add(new FieldDeclaration(fieldName, exp)); + return new FieldExpression(exp, _newAlias, fieldName); + } + } + return base.Visit(exp); + } + + protected override Expression VisitAggregate(AggregateExpression aggregate) + { + return aggregate; + } + + protected override Expression VisitAggregateSubquery(AggregateSubqueryExpression aggregateSubquery) + { + return aggregateSubquery; + } + + private bool IsFieldNameInUse(string name) + { + return _fieldNames.Contains(name); + } + + private string GetUniqueFieldName(string name) + { + string baseName = name; + int suffix = 1; + while(IsFieldNameInUse(name)) + name = baseName + (suffix++); + return name; + } + + private string GetNextFieldName() + { + return GetUniqueFieldName("_$f" + (columnIndex++)); + } + + public class FieldProjection + { + private readonly ReadOnlyCollection _fields; + private readonly Expression _projector; + + public ReadOnlyCollection Fields + { + get { return _fields; } + } + + public Expression Projector + { + get { return _projector; } + } + + public FieldProjection(ReadOnlyCollection fields, Expression projector) + { + _fields = fields; + _projector = projector; + } + } + + private class Nominator : MongoExpressionVisitor + { + private readonly Func _predicate; + private HashSet _candidates; + private bool _isBlocked; + + public Nominator(Func predicate) + { + if (predicate == null) + throw new ArgumentNullException("predicate"); + + _predicate = predicate; + } + + public HashSet Nominate(Expression expression) + { + _candidates = new HashSet(); + _isBlocked = false; + Visit(expression); + return _candidates; + } + + protected override Expression Visit(Expression expression) + { + if 
(expression != null) + { + var saveIsBlocked = _isBlocked; + _isBlocked = false; + base.Visit(expression); + if (!_isBlocked) + { + if (_predicate(expression)) + _candidates.Add(expression); + else + _isBlocked = true; + } + _isBlocked |= saveIsBlocked; + } + return expression; + } + + protected override Expression VisitField(FieldExpression f) + { + return f; + } + + protected override Expression VisitAggregate(AggregateExpression aggregate) + { + return aggregate; + } + + protected override Expression VisitAggregateSubquery(AggregateSubqueryExpression aggregateSubquery) + { + return aggregateSubquery; + } + + protected override Expression VisitScalar(ScalarExpression scalar) + { + return scalar; + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/JavascriptFormatter.cs b/source/MongoDB/Linq/Translators/JavascriptFormatter.cs new file mode 100644 index 00000000..9f4dd2bc --- /dev/null +++ b/source/MongoDB/Linq/Translators/JavascriptFormatter.cs @@ -0,0 +1,318 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; +using System.Text; +using System.Text.RegularExpressions; + +using MongoDB.Linq.Expressions; +using MongoDB.Util; + +namespace MongoDB.Linq.Translators +{ + internal class JavascriptFormatter : MongoExpressionVisitor + { + private StringBuilder _js; + + public string FormatJavascript(Expression expression) + { + _js = new StringBuilder(); + Visit(expression); + return _js.ToString(); + } + + protected override Expression VisitBinary(BinaryExpression b) + { + _js.Append("("); + Visit(b.Left); + + switch (b.NodeType) + { + case ExpressionType.Equal: + _js.Append(" === "); + break; + case ExpressionType.GreaterThan: + _js.Append(" > "); + break; + case ExpressionType.GreaterThanOrEqual: + _js.Append(" >= "); + break; + case ExpressionType.LessThan: + _js.Append(" < "); + break; + case ExpressionType.LessThanOrEqual: + _js.Append(" <= "); + break; + case ExpressionType.NotEqual: + _js.Append(" != "); + break; + case ExpressionType.Modulo: + _js.Append(" % "); + break; + case ExpressionType.And: + case ExpressionType.AndAlso: + _js.Append(" && "); + break; + case ExpressionType.Or: + case ExpressionType.OrElse: + _js.Append(" || "); + break; + case ExpressionType.Add: + case ExpressionType.AddChecked: + _js.Append(" + "); + break; + case ExpressionType.Coalesce: + _js.Append(" || "); + break; + case ExpressionType.Divide: + _js.Append(" / "); + break; + case ExpressionType.ExclusiveOr: + _js.Append(" ^ "); + break; + case ExpressionType.LeftShift: + _js.Append(" << "); + break; + case ExpressionType.Multiply: + case ExpressionType.MultiplyChecked: + _js.Append(" * "); + break; + case ExpressionType.RightShift: + _js.Append(" >> "); + break; + case ExpressionType.Subtract: + case ExpressionType.SubtractChecked: + _js.Append(" - "); + break; + default: + throw new NotSupportedException(string.Format("The operation {0} is not supported.", b.NodeType)); + } + + Visit(b.Right); + + _js.Append(")"); + return b; + } + + protected override Expression VisitConstant(ConstantExpression c) + { + _js.Append(GetJavascriptValueForConstant(c)); + return c; + } + + protected override Expression VisitField(FieldExpression f) + { + //TODO: may need to handle a field that composes other fields. 
+ _js.AppendFormat("this.{0}", f.Name); + return f; + } + + protected override Expression VisitMemberAccess(MemberExpression m) + { + if (m.Member.DeclaringType == typeof(Array)) + { + if (m.Member.Name == "Length") + { + Visit(m.Expression); + _js.Append(".length"); + return m; + } + } + else if (m.Member.DeclaringType == typeof(string)) + { + if (m.Member.Name == "Length") + { + Visit(m.Expression); + _js.Append(".length"); + return m; + } + } + else if (typeof(ICollection).IsAssignableFrom(m.Member.DeclaringType)) + { + if (m.Member.Name == "Count") + { + Visit(m.Expression); + _js.Append(".length"); + return m; + } + } + else if (typeof(ICollection<>).IsOpenTypeAssignableFrom(m.Member.DeclaringType)) + { + if (m.Member.Name == "Count") + { + Visit(m.Expression); + _js.Append(".length"); + return m; + } + } + + throw new NotSupportedException(string.Format("The member {0} is not supported.", m.Member.Name)); + } + + protected override Expression VisitMethodCall(MethodCallExpression m) + { + if (m == null) + return m; + + FieldExpression field; + if (m.Method.DeclaringType == typeof(Queryable) || m.Method.DeclaringType == typeof(Enumerable)) + { + switch (m.Method.Name) + { + case "Count": + if (m.Arguments.Count == 1) + { + Visit(m.Arguments[0]); + _js.Append(".length"); + return m; + } + throw new NotSupportedException("The method Count with a predicate is not supported for field."); + } + } + else if (m.Method.DeclaringType == typeof(string)) + { + field = m.Object as FieldExpression; + if (field == null) + throw new InvalidQueryException(string.Format("The mongo field must be the operator for a string operation of type {0}.", m.Method.Name)); + Visit(field); + + switch (m.Method.Name) + { + case "StartsWith": + _js.AppendFormat("/^{0}/", EvaluateConstant(m.Arguments[0])); + break; + case "EndsWith": + _js.AppendFormat("/{0}$/", EvaluateConstant(m.Arguments[0])); + break; + case "Contains": + _js.AppendFormat("/{0}/", EvaluateConstant(m.Arguments[0])); + break; + case "SubString": + switch(m.Arguments.Count) + { + case 1: + _js.AppendFormat(".substr({0})", EvaluateConstant(m.Arguments[0])); + break; + case 2: + _js.AppendFormat(".substr({0})", EvaluateConstant(m.Arguments[0]), EvaluateConstant(m.Arguments[1])); + break; + } + break; + case "ToLower": + _js.Append(".toLowerCase()"); + break; + case "ToUpper": + _js.Append(".toUpperCase()"); + break; + case "get_Chars": + _js.AppendFormat("[{0}]", EvaluateConstant(m.Arguments[0])); + break; + default: + throw new NotSupportedException(string.Format("The string method {0} is not supported.", m.Method.Name)); + } + + return m; + } + else if (m.Method.DeclaringType == typeof(Regex)) + { + if (m.Method.Name == "IsMatch") + { + field = m.Arguments[0] as FieldExpression; + if (field == null) + throw new InvalidQueryException(string.Format("The mongo field must be the operator for a string operation of type {0}.", m.Method.Name)); + + Visit(field); + string value; + if (m.Object == null) + value = EvaluateConstant(m.Arguments[1]); + else + throw new InvalidQueryException(string.Format("Only the static Regex.IsMatch is supported.", m.Method.Name)); + + _js.AppendFormat("/{0}/", value); + return m; + } + } + + throw new NotSupportedException(string.Format("The method {0} is not supported.", m.Method.Name)); + } + + protected override NewExpression VisitNew(NewExpression nex) + { + _js.Append(new JavascriptObjectFormatter().FormatObject(nex)); + return nex; + } + + protected override Expression VisitUnary(UnaryExpression u) + { + switch 
(u.NodeType) + { + case ExpressionType.Negate: + case ExpressionType.NegateChecked: + _js.Append("-"); + Visit(u.Operand); + break; + case ExpressionType.UnaryPlus: + _js.Append("+"); + Visit(u.Operand); + break; + case ExpressionType.Not: + _js.Append("!("); + Visit(u.Operand); + _js.Append(")"); + break; + case ExpressionType.Convert: + case ExpressionType.ConvertChecked: + Visit(u.Operand); + break; + default: + throw new NotSupportedException(string.Format("The unary operator {0} is not supported.", u.NodeType)); + } + + return u; + } + + private static T EvaluateConstant(Expression e) + { + if (e.NodeType != ExpressionType.Constant) + throw new ArgumentException("Expression must be a constant."); + + return (T)((ConstantExpression)e).Value; + } + + private static string GetJavascriptValueForConstant(ConstantExpression c) + { + return JsonFormatter.SerializeForServerSide(c.Value); + } + + private class JavascriptObjectFormatter : MongoExpressionVisitor + { + private StringBuilder _js; + private readonly JavascriptFormatter _formatter; + + public JavascriptObjectFormatter() + { + _formatter = new JavascriptFormatter(); + } + + public string FormatObject(Expression nex) + { + _js = new StringBuilder("{"); + Visit(nex); + return _js.Append("}").ToString(); + } + + protected override NewExpression VisitNew(NewExpression nex) + { + var parameters = nex.Constructor.GetParameters(); + for(int i = 0; i < nex.Arguments.Count; i++) + { + if (i > 0) + _js.Append(", "); + _js.AppendFormat("{0}: {1}", parameters[i].Name, _formatter.FormatJavascript(nex.Arguments[i])); + } + return nex; + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/MapReduceFinalizerFunctionBuilder.cs b/source/MongoDB/Linq/Translators/MapReduceFinalizerFunctionBuilder.cs new file mode 100644 index 00000000..f5f2cd8b --- /dev/null +++ b/source/MongoDB/Linq/Translators/MapReduceFinalizerFunctionBuilder.cs @@ -0,0 +1,66 @@ +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Text; +using MongoDB.Linq.Expressions; +using System.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class MapReduceFinalizerFunctionBuilder : MongoExpressionVisitor + { + private StringBuilder _return; + private List> _returnValues; + private string _currentAggregateName; + + public string Build(ReadOnlyCollection fields) + { + _return = new StringBuilder(); + _returnValues = new List>(); + _return.Append("function(key, value) { return { "); + + VisitFieldDeclarationList(fields); + + for (int i = 0; i < _returnValues.Count; i++) + { + if (i > 0) + _return.Append(", "); + _return.AppendFormat("\"{0}\": {1}", _returnValues[i].Key, _returnValues[i].Value); + } + + _return.Append("};}"); + + return _return.ToString(); + } + + protected override Expression VisitAggregate(AggregateExpression aggregate) + { + switch (aggregate.AggregateType) + { + case AggregateType.Average: + _returnValues.Add(new KeyValuePair(_currentAggregateName, string.Format("value.{0}Sum/value.{0}Cnt", _currentAggregateName))); + break; + case AggregateType.Count: + case AggregateType.Max: + case AggregateType.Min: + case AggregateType.Sum: + _returnValues.Add(new KeyValuePair(_currentAggregateName, "value." 
+ _currentAggregateName)); + break; + } + + return aggregate; + } + + protected override ReadOnlyCollection VisitFieldDeclarationList(ReadOnlyCollection fields) + { + for (int i = 0, n = fields.Count; i < n; i++) + { + _currentAggregateName = fields[i].Name; + if (_currentAggregateName == "*") + continue; + Visit(fields[i].Expression); + } + + return fields; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/MapReduceMapFunctionBuilder.cs b/source/MongoDB/Linq/Translators/MapReduceMapFunctionBuilder.cs new file mode 100644 index 00000000..322b0b13 --- /dev/null +++ b/source/MongoDB/Linq/Translators/MapReduceMapFunctionBuilder.cs @@ -0,0 +1,89 @@ +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Text; +using MongoDB.Linq.Expressions; +using System.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class MapReduceMapFunctionBuilder : MongoExpressionVisitor + { + private readonly JavascriptFormatter _formatter; + private Dictionary _initMap; + private string _currentAggregateName; + + public MapReduceMapFunctionBuilder() + { + _formatter = new JavascriptFormatter(); + } + + public string Build(ReadOnlyCollection fields, Expression groupBy) + { + var sb = new StringBuilder(); + sb.Append("function() { emit("); + + sb.Append(groupBy == null ? "1" : _formatter.FormatJavascript(groupBy)); + + sb.Append(", "); + + _initMap = new Dictionary(); + VisitFieldDeclarationList(fields); + FormatInit(sb); + + sb.Append("); }"); + + return sb.ToString(); + } + + protected override Expression VisitAggregate(AggregateExpression aggregate) + { + switch (aggregate.AggregateType) + { + case AggregateType.Average: + _initMap[_currentAggregateName + "Sum"] = _formatter.FormatJavascript(aggregate.Argument); + _initMap[_currentAggregateName + "Cnt"] = "1"; + break; + case AggregateType.Count: + _initMap[_currentAggregateName] = "1"; + break; + case AggregateType.Max: + case AggregateType.Min: + case AggregateType.Sum: + _initMap[_currentAggregateName] = _formatter.FormatJavascript(aggregate.Argument); + break; + } + + return aggregate; + } + + protected override ReadOnlyCollection VisitFieldDeclarationList(ReadOnlyCollection fields) + { + for (int i = 0, n = fields.Count; i < n; i++) + { + _currentAggregateName = fields[i].Name; + if (_currentAggregateName == "*") + continue; + Visit(fields[i].Expression); + } + + return fields; + } + + private void FormatInit(StringBuilder sb) + { + sb.Append("{"); + var isFirst = true; + foreach (var field in _initMap) + { + if (isFirst) + isFirst = false; + else + sb.Append(", "); + + sb.AppendFormat("\"{0}\": {1}", field.Key, field.Value); + } + sb.Append("}"); + } + + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/MapReduceReduceFunctionBuilder.cs b/source/MongoDB/Linq/Translators/MapReduceReduceFunctionBuilder.cs new file mode 100644 index 00000000..bad637eb --- /dev/null +++ b/source/MongoDB/Linq/Translators/MapReduceReduceFunctionBuilder.cs @@ -0,0 +1,118 @@ +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Text; +using MongoDB.Linq.Expressions; +using System.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class MapReduceReduceFunctionBuilder : MongoExpressionVisitor + { + private StringBuilder _declare; + private StringBuilder _loop; + private StringBuilder _return; + private List> _returnValues; + private string _currentAggregateName; + + public string Build(ReadOnlyCollection fields) + { + 
_declare = new StringBuilder(); + _loop = new StringBuilder(); + _return = new StringBuilder(); + _returnValues = new List>(); + _declare.Append("function(key, values) {"); + _loop.Append("values.forEach(function(doc) {"); + _return.Append("return { "); + + VisitFieldDeclarationList(fields); + + for (int i = 0; i < _returnValues.Count; i++) + { + if (i > 0) + _return.Append(", "); + _return.AppendFormat("\"{0}\": {1}", _returnValues[i].Key, _returnValues[i].Value); + } + + _loop.Append("});"); + _return.Append("};}"); + + return _declare.ToString() + _loop + _return; + } + + protected override Expression VisitAggregate(AggregateExpression aggregate) + { + switch (aggregate.AggregateType) + { + case AggregateType.Average: + AverageAggregate(aggregate); + break; + case AggregateType.Count: + CountAggregate(aggregate); + break; + case AggregateType.Max: + MaxAggregate(aggregate); + break; + case AggregateType.Min: + MinAggregate(aggregate); + break; + case AggregateType.Sum: + SumAggregate(aggregate); + break; + } + + return aggregate; + } + + protected override ReadOnlyCollection VisitFieldDeclarationList(ReadOnlyCollection fields) + { + for (int i = 0, n = fields.Count; i < n; i++) + { + _currentAggregateName = fields[i].Name; + if (_currentAggregateName == "*") + continue; + Visit(fields[i].Expression); + } + + return fields; + } + + private void AverageAggregate(AggregateExpression aggregate) + { + var old = _currentAggregateName; + _currentAggregateName = old + "Cnt"; + CountAggregate(aggregate); + _currentAggregateName = old + "Sum"; + SumAggregate(aggregate); + _currentAggregateName = old; + } + + private void CountAggregate(AggregateExpression aggregate) + { + _declare.AppendFormat("var {0} = 0;", _currentAggregateName); + _loop.AppendFormat("{0} += doc.{0};", _currentAggregateName); + _returnValues.Add(new KeyValuePair(_currentAggregateName, _currentAggregateName)); + } + + private void MaxAggregate(AggregateExpression aggregate) + { + _declare.AppendFormat("var {0} = Number.MIN_VALUE;", _currentAggregateName); + _loop.AppendFormat("if(doc.{0} > {0}) {0} = doc.{0};", _currentAggregateName); + _returnValues.Add(new KeyValuePair(_currentAggregateName, _currentAggregateName)); + } + + private void MinAggregate(AggregateExpression aggregate) + { + _declare.AppendFormat("var {0} = Number.MAX_VALUE;", _currentAggregateName); + _loop.AppendFormat("if(doc.{0} < {0}) {0} = doc.{0};", _currentAggregateName); + _returnValues.Add(new KeyValuePair(_currentAggregateName, _currentAggregateName)); + } + + private void SumAggregate(AggregateExpression aggregate) + { + _declare.AppendFormat("var {0} = 0;", _currentAggregateName); + _loop.AppendFormat("{0} += doc.{0};", _currentAggregateName); + _returnValues.Add(new KeyValuePair(_currentAggregateName, _currentAggregateName)); + } + + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/MongoQueryObjectBuilder.cs b/source/MongoDB/Linq/Translators/MongoQueryObjectBuilder.cs new file mode 100644 index 00000000..16970c87 --- /dev/null +++ b/source/MongoDB/Linq/Translators/MongoQueryObjectBuilder.cs @@ -0,0 +1,179 @@ +using System; +using MongoDB.Linq.Expressions; +using System.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class MongoQueryObjectBuilder : MongoExpressionVisitor + { + private MongoQueryObject _queryObject; + private QueryAttributes _queryAttributes; + + internal MongoQueryObject Build(Expression expression) + { + _queryObject = new MongoQueryObject(); + _queryAttributes = new 
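// A rough sketch of the javascript emitted by the three map-reduce builders above for a
// grouped aggregate such as people.GroupBy(p => p.UserId)
//                                  .Select(g => new { Avg = g.Average(p => p.Age) });
// the exact identifiers depend on the projected field declarations, so treat these strings
// as an approximation rather than the exact server-side output:
//
//   map:      function() { emit(this.UserId, {"AvgSum": this.Age, "AvgCnt": 1}); }
//   reduce:   function(key, values) {
//                 var AvgCnt = 0; var AvgSum = 0;
//                 values.forEach(function(doc) { AvgCnt += doc.AvgCnt; AvgSum += doc.AvgSum; });
//                 return { "AvgCnt": AvgCnt, "AvgSum": AvgSum };
//             }
//   finalize: function(key, value) { return { "Avg": value.AvgSum/value.AvgCnt }; }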
QueryAttributesGatherer().Gather(expression); + _queryObject.IsCount = _queryAttributes.IsCount; + _queryObject.IsMapReduce = _queryAttributes.IsMapReduce; + Visit(expression); + return _queryObject; + } + + protected override Expression VisitSelect(SelectExpression select) + { + select = PreProcessSelect(select); + + if (select.From != null) + VisitSource(select.From); + if (select.Where != null) + { + try + { + //try this first, and if it fails, resort to javascript generation, which is slower on the server side. + _queryObject.SetQueryDocument(new DocumentFormatter().FormatDocument(select.Where)); + } + catch + { + _queryObject.SetWhereClause(new JavascriptFormatter().FormatJavascript(select.Where)); + } + } + + if (_queryAttributes.IsMapReduce) + { + _queryObject.IsMapReduce = true; + _queryObject.MapFunction = new MapReduceMapFunctionBuilder().Build(select.Fields, select.GroupBy); + _queryObject.ReduceFunction = new MapReduceReduceFunctionBuilder().Build(select.Fields); + _queryObject.FinalizerFunction = new MapReduceFinalizerFunctionBuilder().Build(select.Fields); + } + else if(!_queryAttributes.IsCount && !select.Fields.HasSelectAllField()) + { + var fieldGatherer = new FieldGatherer(); + foreach (var field in select.Fields) + { + var expandedFields = fieldGatherer.Gather(field.Expression); + foreach (var expandedField in expandedFields) + _queryObject.Fields[expandedField.Name] = 1; + } + } + + if (select.OrderBy != null) + { + foreach (var order in select.OrderBy) + { + var field = Visit(order.Expression) as FieldExpression; + if (field == null) + throw new InvalidQueryException("Complex order by clauses are not supported."); + _queryObject.AddSort(field.Name, order.OrderType == OrderType.Ascending ? 1 : -1); + } + } + + if (select.Take != null) + _queryObject.NumberToLimit = EvaluateConstant(select.Take); + + if (select.Skip != null) + _queryObject.NumberToSkip = EvaluateConstant(select.Skip); + + return select; + } + + protected override Expression VisitProjection(ProjectionExpression projection) + { + Visit(projection.Source); + return projection; + } + + protected override Expression VisitSource(Expression source) + { + switch ((MongoExpressionType)source.NodeType) + { + case MongoExpressionType.Collection: + var collection = (CollectionExpression)source; + _queryObject.CollectionName = collection.CollectionName; + _queryObject.Database = collection.Database; + _queryObject.DocumentType = collection.DocumentType; + break; + case MongoExpressionType.Select: + Visit(source); + break; + default: + throw new InvalidOperationException("Select source is not valid type"); + } + return source; + } + + private SelectExpression PreProcessSelect(SelectExpression select) + { + if (select.Where != null && select.Where.NodeType == ExpressionType.Constant && select.Where.Type == typeof(bool)) + { + var value = EvaluateConstant(select.Where); + if (value) + select = select.SetWhere(null); + else + throw new InvalidQueryException("If you don't want to return any values, don't call the method."); + } + + return select; + } + + private static T EvaluateConstant(Expression e) + { + if (e.NodeType != ExpressionType.Constant) + throw new ArgumentException("Expression must be a constant."); + + return (T)((ConstantExpression)e).Value; + } + + private class QueryAttributes + { + public bool IsCount { get; private set; } + public bool IsMapReduce { get; private set; } + + public QueryAttributes(bool isCount, bool isMapReduce) + { + IsCount = isCount; + IsMapReduce = isMapReduce; + } + } + + private 
class QueryAttributesGatherer : MongoExpressionVisitor + { + private bool _isCount { get; set; } + private bool _isMapReduce { get; set; } + + public QueryAttributes Gather(Expression expression) + { + _isCount = false; + _isMapReduce = false; + Visit(expression); + return new QueryAttributes(_isCount, _isMapReduce); + } + + protected override Expression VisitSelect(SelectExpression select) + { + if (select.From.NodeType != (ExpressionType)MongoExpressionType.Collection) + throw new InvalidQueryException("The query is too complex to be processed by MongoDB. Try building a map-reduce query by hand or simplifying the query and using Linq-to-Objects."); + + bool hasAggregates = new AggregateChecker().HasAggregates(select); + + if (select.GroupBy != null) + _isMapReduce = true; + + else if (hasAggregates) + { + if (select.Fields.Count == 1 && select.Fields[0].Expression.NodeType == (ExpressionType)MongoExpressionType.Aggregate) + { + var aggregateExpression = (AggregateExpression)select.Fields[0].Expression; + if (aggregateExpression.AggregateType == AggregateType.Count) + _isCount = true; + } + + if (!_isCount) + _isMapReduce = true; + } + + Visit(select.Where); + return select; + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/Nominator.cs b/source/MongoDB/Linq/Translators/Nominator.cs new file mode 100644 index 00000000..8116d488 --- /dev/null +++ b/source/MongoDB/Linq/Translators/Nominator.cs @@ -0,0 +1,54 @@ +using System; +using System.Collections.Generic; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class Nominator : MongoExpressionVisitor + { + private readonly Func _predicate; + private HashSet _candidates; + private bool _isBlocked; + + public Nominator(Func predicate) + { + if (predicate == null) + throw new ArgumentNullException("predicate"); + + _predicate = predicate; + } + + public HashSet Nominate(Expression expression) + { + _candidates = new HashSet(); + _isBlocked = false; + Visit(expression); + return _candidates; + } + + protected override Expression Visit(Expression expression) + { + if (expression != null) + { + var saveIsBlocked = _isBlocked; + _isBlocked = false; + base.Visit(expression); + if (!_isBlocked) + { + if (_predicate(expression)) + _candidates.Add(expression); + else + _isBlocked = true; + } + _isBlocked |= saveIsBlocked; + } + return expression; + } + + protected override Expression VisitField(FieldExpression f) + { + return f; + } + } +} diff --git a/source/MongoDB/Linq/Translators/OrderByRewriter.cs b/source/MongoDB/Linq/Translators/OrderByRewriter.cs new file mode 100644 index 00000000..2111dd39 --- /dev/null +++ b/source/MongoDB/Linq/Translators/OrderByRewriter.cs @@ -0,0 +1,163 @@ +using System.Collections.Generic; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; +using System.Collections.ObjectModel; + +namespace MongoDB.Linq.Translators +{ + internal class OrderByRewriter : MongoExpressionVisitor + { + private IList _gatheredOrderings; + private HashSet _uniqueColumns; + private bool _isOutermostSelect; + + public Expression Rewrite(Expression expression) + { + _isOutermostSelect = true; + return Visit(expression); + } + + protected override Expression VisitSelect(SelectExpression select) + { + var saveIsOuterMostSelect = _isOutermostSelect; + try + { + _isOutermostSelect = false; + select = (SelectExpression)base.VisitSelect(select); + + var hasOrderBy = select.OrderBy != null && select.OrderBy.Count > 0; + var hasGroupBy = 
select.GroupBy != null; + var canHaveOrderings = saveIsOuterMostSelect || select.Take != null || select.Skip != null; + var canReceivedOrderings = canHaveOrderings && !hasGroupBy && !select.IsDistinct; + + if (hasOrderBy) + PrependOrderings(select.OrderBy); + + IEnumerable orderings = null; + if (canReceivedOrderings) + orderings = _gatheredOrderings; + else if (canHaveOrderings) + orderings = select.OrderBy; + + var canPassOnOrderings = !saveIsOuterMostSelect && !hasGroupBy && !select.IsDistinct; + ReadOnlyCollection fields = select.Fields; + if (_gatheredOrderings != null) + { + if (canPassOnOrderings) + { + var producedAliases = new DeclaredAliasGatherer().Gather(select.From); + + BindResult project = RebindOrderings(_gatheredOrderings, select.Alias, producedAliases, select.Fields); + _gatheredOrderings = null; + PrependOrderings(project.Orderings); + fields = project.Fields; + } + else + _gatheredOrderings = null; + } + if (orderings != select.OrderBy || fields != select.Fields) + select = new SelectExpression(select.Alias, fields, select.From, select.Where, orderings, select.GroupBy, select.IsDistinct, select.Skip, select.Take); + + return select; + } + finally + { + _isOutermostSelect = saveIsOuterMostSelect; + } + } + + protected override Expression VisitSubquery(SubqueryExpression subquery) + { + var saveOrderings = _gatheredOrderings; + _gatheredOrderings = null; + var result = base.VisitSubquery(subquery); + _gatheredOrderings = saveOrderings; + return result; + } + + private void PrependOrderings(IList newOrderings) + { + if(newOrderings == null) + return; + + if (_gatheredOrderings == null) + { + _gatheredOrderings = new List(); + _uniqueColumns = new HashSet(); + } + + for (int i = newOrderings.Count - 1; i >= 0; i--) + { + var ordering = newOrderings[i]; + var field = ordering.Expression as FieldExpression; + if (field != null) + { + var hash = field.Alias + ":" + field.Name; + if (!_uniqueColumns.Contains(hash)) + { + _gatheredOrderings.Insert(0, ordering); + _uniqueColumns.Add(hash); + } + } + else + _gatheredOrderings.Insert(0, ordering); + } + } + + private BindResult RebindOrderings(IEnumerable orderings, Alias alias, ICollection existingAliases, IEnumerable existingFields) + { + List newFields = null; + var newOrderings = new List(); + foreach (var ordering in orderings) + { + var expression = ordering.Expression; + var field = expression as FieldExpression; + + if(field != null && (existingAliases == null || !existingAliases.Contains(field.Alias))) + continue; + + int ordinal = 0; + foreach (var fieldDecl in existingFields) + { + var fieldDeclExpression = fieldDecl.Expression as FieldExpression; + if (fieldDecl.Expression == ordering.Expression || (field != null && fieldDeclExpression != null && field.Alias == fieldDeclExpression.Alias && field.Name == fieldDeclExpression.Name)) + { + if(field != null) + expression = new FieldExpression(field.Expression, alias, fieldDecl.Name); + break; + } + ordinal++; + } + + if (expression == ordering.Expression) + { + if (newFields == null) + { + newFields = new List(existingFields); + existingFields = newFields; + } + + var fieldName = field != null ? 
field.Name : "_$f" + ordinal; + newFields.Add(new FieldDeclaration(fieldName, ordering.Expression)); + expression = new FieldExpression(expression, alias, fieldName); + } + + newOrderings.Add(new OrderExpression(ordering.OrderType, expression)); + } + return new BindResult(existingFields, newOrderings); + } + + private class BindResult + { + public ReadOnlyCollection Fields { get; private set; } + + public ReadOnlyCollection Orderings { get; private set; } + + public BindResult(IEnumerable fields, IEnumerable orderings) + { + Fields = fields as ReadOnlyCollection ?? new List(fields).AsReadOnly(); + Orderings = orderings as ReadOnlyCollection ?? new List(orderings).AsReadOnly(); + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/PartialEvaluator.cs b/source/MongoDB/Linq/Translators/PartialEvaluator.cs new file mode 100644 index 00000000..cde17f26 --- /dev/null +++ b/source/MongoDB/Linq/Translators/PartialEvaluator.cs @@ -0,0 +1,64 @@ +using System; +using System.Collections.Generic; +using System.Linq.Expressions; + +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal static class PartialEvaluator + { + /// + /// Performs evaluation and replacement of independent sub-trees + /// + /// The root of the expression tree. + /// A function that decides whether a given expression node can be part of the local function. + /// + /// A new tree with sub-trees evaluated and replaced. + /// + public static Expression Evaluate(Expression expression, Func canBeEvaluated) + { + return new SubtreeEvaluator(new Nominator(canBeEvaluated).Nominate(expression)).Eval(expression); + } + + /// + /// + /// + private class SubtreeEvaluator : ExpressionVisitor + { + private readonly HashSet _candidates; + + internal SubtreeEvaluator(HashSet candidates) + { + _candidates = candidates; + } + + internal Expression Eval(Expression exp) + { + return Visit(exp); + } + + protected override Expression Visit(Expression exp) + { + if (exp == null) + return null; + if (_candidates.Contains(exp)) + { + return Evaluate(exp); + } + + return base.Visit(exp); + } + + private Expression Evaluate(Expression e) + { + if (e.NodeType == ExpressionType.Constant) + return e; + + var lambda = Expression.Lambda(e); + var fn = lambda.Compile(); + return Expression.Constant(fn.DynamicInvoke(null), e.Type); + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/ProjectionBuilder.cs b/source/MongoDB/Linq/Translators/ProjectionBuilder.cs new file mode 100644 index 00000000..e39bbe8e --- /dev/null +++ b/source/MongoDB/Linq/Translators/ProjectionBuilder.cs @@ -0,0 +1,98 @@ +using System; +using System.Linq; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class ProjectionBuilder : MongoExpressionVisitor + { + private bool _isMapReduce; + private ParameterExpression _document; + private readonly GroupingKeyDeterminer _determiner; + + public ProjectionBuilder() + { + _determiner = new GroupingKeyDeterminer(); + } + + public LambdaExpression Build(Expression projector, Type documentType, string parameterName, bool isMapReduce) + { + _isMapReduce = isMapReduce; + if (_isMapReduce) + _document = Expression.Parameter(typeof(Document), parameterName); + else + _document = Expression.Parameter(documentType, parameterName); + + return Expression.Lambda(Visit(projector), _document); + } + + protected override Expression VisitField(FieldExpression field) + { + if(!_isMapReduce) + return 
Visit(field.Expression); + + var parts = field.Name.Split('.'); + + bool isGroupingField = _determiner.IsGroupingKey(field); + Expression current; + if(parts.Contains("Key") && isGroupingField) + current = _document; + else + current = Expression.Call( + _document, + "Get", + new[] {typeof(Document)}, + Expression.Constant("value")); + + for(int i = 0, n = parts.Length; i < n; i++) + { + var type = i == n - 1 ? field.Type : typeof(Document); + + if(parts[i] == "Key" && isGroupingField) + parts[i] = "_id"; + + current = Expression.Call( + current, + "Get", + new[] {type}, + Expression.Constant(parts[i])); + } + + return current; + } + + protected override Expression VisitParameter(ParameterExpression p) + { + return _document; + } + + private class GroupingKeyDeterminer : MongoExpressionVisitor + { + private bool _isGroupingKey; + + public bool IsGroupingKey(Expression exp) + { + _isGroupingKey = false; + Visit(exp); + return _isGroupingKey; + } + + protected override Expression Visit(Expression exp) + { + if (exp == null) + return exp; + + if (_isGroupingKey) + return exp; + + if (exp.Type.IsGenericType && exp.Type.GetGenericTypeDefinition() == typeof(Grouping<,>)) + { + _isGroupingKey = true; + return exp; + } + return base.Visit(exp); + } + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/QueryBinder.cs b/source/MongoDB/Linq/Translators/QueryBinder.cs new file mode 100644 index 00000000..c6292553 --- /dev/null +++ b/source/MongoDB/Linq/Translators/QueryBinder.cs @@ -0,0 +1,614 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class QueryBinder : MongoExpressionVisitor + { + private int _aggregateCount; + private Expression _currentGroupElement; + private Dictionary _groupByMap; + private Dictionary _map; + private readonly FieldProjector _projector; + private IQueryProvider _provider; + private readonly Expression _root; + private List _thenBy; + private bool _inField; + + public QueryBinder(IQueryProvider provider, Expression root) + { + _projector = new FieldProjector(CanBeField); + _provider = provider; + _root = root; + } + + public Expression Bind(Expression expression) + { + _inField = false; + _map = new Dictionary(); + _groupByMap = new Dictionary(); + return Visit(expression); + } + + protected override Expression VisitBinary(BinaryExpression b) + { + ExpressionType nodeType = b.NodeType; + bool shouldFlip = false; + switch (nodeType) + { + case ExpressionType.LessThan: + nodeType = ExpressionType.GreaterThanOrEqual; + shouldFlip = true; + break; + case ExpressionType.LessThanOrEqual: + nodeType = ExpressionType.GreaterThan; + shouldFlip = true; + break; + case ExpressionType.GreaterThan: + nodeType = ExpressionType.LessThanOrEqual; + shouldFlip = true; + break; + case ExpressionType.GreaterThanOrEqual: + nodeType = ExpressionType.LessThan; + shouldFlip = true; + break; + case ExpressionType.NotEqual: + shouldFlip = true; + break; + case ExpressionType.Equal: + shouldFlip = true; + break; + } + + //reverse the conditionals if the left one is a constant to make things easier in the formatter... 
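//Illustrative note: a predicate written with the constant on the left, e.g. p => 21 < p.Age,
//is rebuilt with its operands swapped so the field reference ends up on the left and the
//constant on the right, which is the shape the formatter expects when rendering the query.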
+ if (shouldFlip && b.Left.NodeType == ExpressionType.Constant) + b = Expression.MakeBinary(nodeType, b.Right, b.Left, b.IsLiftedToNull, b.Method, b.Conversion); + + return base.VisitBinary(b); + } + + protected override Expression VisitConstant(ConstantExpression c) + { + if (IsCollection(c.Value)) + return GetCollectionProjection(c.Value); + return base.VisitConstant(c); + } + + protected override Expression VisitField(FieldExpression f) + { + _inField = true; + var e = base.VisitField(f); + _inField = false; + return e; + } + + protected override Expression VisitMemberAccess(MemberExpression m) + { + var source = Visit(m.Expression); + switch (source.NodeType) + { + case ExpressionType.MemberInit: + var init = (MemberInitExpression)source; + for (int i = 0, n = init.Bindings.Count; i < n; i++) + { + var ma = init.Bindings[i] as MemberAssignment; + if (ma != null && MembersMatch(ma.Member, m.Member)) + return ma.Expression; + } + break; + case ExpressionType.New: + var nex = (NewExpression)source; + if (nex.Members != null) + { + for (int i = 0, n = nex.Members.Count; i < n; i++) + { + if (MembersMatch(nex.Members[i], m.Member)) + return nex.Arguments[i]; + } + } + break; + } + + if (source == m.Expression) + return m; + + return Expression.MakeMemberAccess(source, m.Member); + } + + protected override Expression VisitMethodCall(MethodCallExpression m) + { + if (m.Method.DeclaringType == typeof(Queryable) || m.Method.DeclaringType == typeof(Enumerable)) + { + //if we are running off a field expression, things get handled in the QueryFormatter + if (!IsOperationOnAField(m)) + { + switch (m.Method.Name) + { + case "Any": + if (m.Arguments.Count == 1) + return BindAny(m.Arguments[0], null, m == _root); + else + return BindAny(m.Arguments[0], (LambdaExpression)StripQuotes(m.Arguments[1]), m == _root); + case "Where": + return BindWhere(m.Type, m.Arguments[0], (LambdaExpression)StripQuotes(m.Arguments[1])); + case "Select": + return BindSelect(m.Type, m.Arguments[0], (LambdaExpression)StripQuotes(m.Arguments[1])); + case "OrderBy": + return BindOrderBy(m.Type, m.Arguments[0], (LambdaExpression)StripQuotes(m.Arguments[1]), OrderType.Ascending); + case "OrderByDescending": + return BindOrderBy(m.Type, m.Arguments[0], (LambdaExpression)StripQuotes(m.Arguments[1]), OrderType.Descending); + case "ThenBy": + return BindThenBy(m.Arguments[0], (LambdaExpression)StripQuotes(m.Arguments[1]), OrderType.Ascending); + case "ThenByDescending": + return BindThenBy(m.Arguments[0], (LambdaExpression)StripQuotes(m.Arguments[1]), OrderType.Descending); + case "Take": + if (m.Arguments.Count == 2) + return this.BindTake(m.Arguments[0], m.Arguments[1]); + break; + case "Skip": + if (m.Arguments.Count == 2) + return this.BindSkip(m.Arguments[0], m.Arguments[1]); + break; + case "First": + case "FirstOrDefault": + case "Single": + case "SingleOrDefault": + if (m.Arguments.Count == 1) + return BindFirstOrSingle(m.Arguments[0], null, m.Method.Name, m == _root); + if (m.Arguments.Count == 2) + { + var predicate = (LambdaExpression)StripQuotes(m.Arguments[1]); + return BindFirstOrSingle(m.Arguments[0], predicate, m.Method.Name, m == _root); + } + break; + case "Count": + case "Sum": + case "Average": + case "Min": + case "Max": + switch(m.Arguments.Count) + { + case 1: + return BindAggregate(m.Arguments[0], m.Method, null, m == _root); + case 2: + { + var argument = (LambdaExpression)StripQuotes(m.Arguments[1]); + return BindAggregate(m.Arguments[0], m.Method, argument, m == _root); + } + } + break; + case "GroupBy": + 
if (m.Arguments.Count == 2) + return BindGroupBy(m.Arguments[0], (LambdaExpression)StripQuotes(m.Arguments[1]), null, null); + if (m.Arguments.Count == 3) + return BindGroupBy(m.Arguments[0], (LambdaExpression)StripQuotes(m.Arguments[1]), (LambdaExpression)StripQuotes(m.Arguments[2]), null); + if (m.Arguments.Count == 4) + return BindGroupBy(m.Arguments[0], (LambdaExpression)StripQuotes(m.Arguments[1]), (LambdaExpression)StripQuotes(m.Arguments[2]), (LambdaExpression)StripQuotes(m.Arguments[3])); + break; + } + throw new NotSupportedException(string.Format("The method '{0}' is not supported", m.Method.Name)); + } + } + return base.VisitMethodCall(m); + } + + protected override Expression VisitParameter(ParameterExpression p) + { + Expression e; + return _map.TryGetValue(p, out e) ? e : p; + } + + private Expression BindAggregate(Expression source, MethodInfo method, LambdaExpression argument, bool isRoot) + { + var returnType = method.ReturnType; + var aggregateType = GetAggregateType(method.Name); + bool hasPredicateArgument = HasPredicateArgument(aggregateType); + bool distinct = false; + bool argumentWasPredicate = false; + + var methodCallExpression = source as MethodCallExpression; + if (methodCallExpression != null && !hasPredicateArgument && argument == null) + { + if (methodCallExpression.Method.Name == "Distinct" && methodCallExpression.Arguments.Count == 1 + && (methodCallExpression.Method.DeclaringType == typeof(Queryable) || methodCallExpression.Method.DeclaringType == typeof(Enumerable))) + { + source = methodCallExpression.Arguments[0]; + distinct = true; + } + } + + if (argument != null && hasPredicateArgument) + { + source = Expression.Call(typeof(Queryable), "Where", method.GetGenericArguments(), source, argument); + argument = null; + argumentWasPredicate = true; + } + + var projection = VisitSequence(source); + Expression argExpression = null; + if (argument != null) + { + _map[argument.Parameters[0]] = projection.Projector; + argExpression = Visit(argument.Body); + } + else if (!hasPredicateArgument) + argExpression = projection.Projector; + + var alias = new Alias(); + Expression aggregateExpression = new AggregateExpression(returnType, aggregateType, argExpression, distinct); + var selectType = typeof(IEnumerable<>).MakeGenericType(returnType); + string fieldName = "_$agg" + (_aggregateCount++); + var select = new SelectExpression(alias, new[] { new FieldDeclaration(fieldName, aggregateExpression) }, projection.Source, null); + + if (isRoot) + { + var parameter = Expression.Parameter(selectType, "p"); + var lambda = Expression.Lambda(Expression.Call(typeof(Enumerable), "Single", new[] { returnType }, parameter), parameter); + return new ProjectionExpression( + select, + new FieldExpression(aggregateExpression, alias, fieldName), + lambda); + } + + var subquery = new ScalarExpression(returnType, select); + + GroupByInfo info; + if (!argumentWasPredicate && _groupByMap.TryGetValue(projection, out info)) + { + if (argument != null) + { + _map[argument.Parameters[0]] = info.Element; + argExpression = Visit(argument.Body); + } + else if (!hasPredicateArgument) + argExpression = info.Element; + + aggregateExpression = new AggregateExpression(returnType, aggregateType, argExpression, distinct); + + if (projection == _currentGroupElement) + return aggregateExpression; + + return new AggregateSubqueryExpression(info.Alias, aggregateExpression, subquery); + } + + return subquery; + } + + private Expression BindAny(Expression source, LambdaExpression predicate, bool isRoot) + { 
+ var projection = VisitSequence(source); + var sourceType = projection.Projector.Type; + + MethodInfo method = typeof(Queryable) + .GetMethods(BindingFlags.Public | BindingFlags.Static) + .Where(m => m.Name == "Count") + .Single(m => m.GetParameters().Length == (predicate == null ? 1 : 2)) + .GetGenericMethodDefinition().MakeGenericMethod(sourceType); + + var expression = BindAggregate(source, method, predicate, isRoot); + + return Expression.GreaterThan( + expression, Expression.Constant(0)); + } + + private Expression BindDistinct(Expression source) + { + var projection = VisitSequence(source); + var alias = new Alias(); + var fieldProjection = _projector.ProjectFields(projection.Projector, alias, projection.Source.Alias); + return new ProjectionExpression( + new SelectExpression(alias, fieldProjection.Fields, projection.Source, null, null, null, true, null, null), + fieldProjection.Projector); + } + + private Expression BindFirstOrSingle(Expression source, LambdaExpression predicate, string kind, bool isRoot) + { + var projection = VisitSequence(source); + Expression where = null; + if (predicate != null) + { + _map[predicate.Parameters[0]] = projection.Projector; + where = Visit(predicate.Body); + } + + Expression take = kind.StartsWith("First") ? Expression.Constant(1) : null; + if (take == null & kind.StartsWith("Single")) + take = Expression.Constant(2); + + if (take != null || where != null) + { + var alias = new Alias(); + var fieldProjection = _projector.ProjectFields(projection.Projector, alias, projection.Source.Alias); + projection = new ProjectionExpression( + new SelectExpression(alias, fieldProjection.Fields, projection.Source, where, null, null, false, null, take), + fieldProjection.Projector); + } + if (isRoot) + { + var elementType = projection.Projector.Type; + var p = Expression.Parameter(typeof(IEnumerable<>).MakeGenericType(elementType), "p"); + var lambda = Expression.Lambda(Expression.Call(typeof(Enumerable), kind, new[] { elementType }, p), p); + return new ProjectionExpression(projection.Source, projection.Projector, lambda); + } + return projection; + } + + protected virtual Expression BindGroupBy(Expression source, LambdaExpression keySelector, LambdaExpression elementSelector, LambdaExpression resultSelector) + { + var projection = VisitSequence(source); + + _map[keySelector.Parameters[0]] = projection.Projector; + var keyExpression = Visit(keySelector.Body); + + var elementExpression = projection.Projector; + if (elementSelector != null) + { + _map[elementSelector.Parameters[0]] = projection.Projector; + elementExpression = Visit(elementSelector.Body); + } + + var subqueryBasis = VisitSequence(source); + _map[keySelector.Parameters[0]] = subqueryBasis.Projector; + var subqueryKeyExpression = Visit(keySelector.Body); + + var subqueryCorrelation = Expression.Equal(keyExpression, subqueryKeyExpression); + + var subqueryElementExpression = subqueryBasis.Projector; + if (elementSelector != null) + { + _map[elementSelector.Parameters[0]] = subqueryBasis.Projector; + subqueryElementExpression = Visit(elementSelector.Body); + } + + var elementAlias = new Alias(); + var elementProjection = _projector.ProjectFields(subqueryElementExpression, elementAlias, subqueryBasis.Source.Alias); + var elementSubquery = + new ProjectionExpression( + new SelectExpression(elementAlias, elementProjection.Fields, subqueryBasis.Source, subqueryCorrelation), + elementProjection.Projector); + + var alias = new Alias(); + + var info = new GroupByInfo(alias, elementExpression); + 
_groupByMap[elementSubquery] = info; + + Expression resultExpression; + if (resultSelector != null) + { + var saveGroupElement = _currentGroupElement; + _currentGroupElement = elementSubquery; + + _map[resultSelector.Parameters[0]] = keyExpression; + _map[resultSelector.Parameters[1]] = elementSubquery; + resultExpression = Visit(resultSelector.Body); + _currentGroupElement = saveGroupElement; + } + else + { + resultExpression = Expression.New( + typeof(Grouping<,>).MakeGenericType(keyExpression.Type, subqueryElementExpression.Type).GetConstructors()[0], + new[] { keyExpression, elementSubquery }); + } + + var fieldProjection = _projector.ProjectFields(resultExpression, alias, projection.Source.Alias); + + var projectedElementSubquery = ((NewExpression)fieldProjection.Projector).Arguments[1]; + _groupByMap[projectedElementSubquery] = info; + + return new ProjectionExpression( + new SelectExpression(alias, new FieldDeclaration[0], projection.Source, null, null, keyExpression, false, null, null), + fieldProjection.Projector); + } + + private Expression BindOrderBy(Type resultType, Expression source, LambdaExpression orderSelector, OrderType orderType) + { + List thenBye = _thenBy; + _thenBy = null; + var projection = VisitSequence(source); + + _map[orderSelector.Parameters[0]] = projection.Projector; + var orderings = new List {new OrderExpression(orderType, Visit(orderSelector.Body))}; + if (thenBye != null) + { + for (int i = thenBye.Count - 1; i >= 0; i--) + { + var oe = thenBye[i]; + var lambda = (LambdaExpression)oe.Expression; + _map[lambda.Parameters[0]] = projection.Projector; + orderings.Add(new OrderExpression(oe.OrderType, Visit(lambda.Body))); + } + } + + var alias = new Alias(); + var fieldProjection = _projector.ProjectFields(projection.Projector, alias, projection.Source.Alias); + return new ProjectionExpression( + new SelectExpression(alias, fieldProjection.Fields, projection.Source, null, orderings.AsReadOnly(), null, false, null, null), + fieldProjection.Projector); + } + + private Expression BindSelect(Type resultType, Expression source, LambdaExpression selector) + { + var projection = VisitSequence(source); + _map[selector.Parameters[0]] = projection.Projector; + var expression = Visit(selector.Body); + var alias = new Alias(); + var fieldProjection = _projector.ProjectFields(expression, alias, projection.Source.Alias); + return new ProjectionExpression( + new SelectExpression(alias, fieldProjection.Fields, projection.Source, null), + fieldProjection.Projector); + } + + private Expression BindSkip(Expression source, Expression skip) + { + var projection = VisitSequence(source); + skip = Visit(skip); + var alias = new Alias(); + var fieldProjection = _projector.ProjectFields(projection.Projector, alias, projection.Source.Alias); + return new ProjectionExpression( + new SelectExpression(alias, fieldProjection.Fields, projection.Source, null, null, null, false, skip, null), + fieldProjection.Projector); + } + + private Expression BindTake(Expression source, Expression take) + { + var projection = VisitSequence(source); + take = Visit(take); + var alias = new Alias(); + var fieldProjection = _projector.ProjectFields(projection.Projector, alias, projection.Source.Alias); + return new ProjectionExpression( + new SelectExpression(alias, fieldProjection.Fields, projection.Source, null, null, null, false, null, take), + fieldProjection.Projector); + } + + private Expression BindThenBy(Expression source, LambdaExpression orderSelector, OrderType orderType) + { + if (_thenBy == 
null) + _thenBy = new List(); + + _thenBy.Add(new OrderExpression(orderType, orderSelector)); + return Visit(source); + } + + private Expression BindWhere(Type resultType, Expression source, LambdaExpression predicate) + { + var projection = VisitSequence(source); + _map[predicate.Parameters[0]] = projection.Projector; + var where = Visit(predicate.Body); + var alias = new Alias(); + var fieldProjection = _projector.ProjectFields(projection.Projector, alias, projection.Source.Alias); + return new ProjectionExpression( + new SelectExpression(alias, fieldProjection.Fields, projection.Source, where), + fieldProjection.Projector); + } + + private ProjectionExpression GetCollectionProjection(object value) + { + var collectionAlias = new Alias(); + var selectAlias = new Alias(); + var collection = (IMongoQueryable)value; + var fields = new List(); + return new ProjectionExpression( + new SelectExpression(selectAlias, fields, new CollectionExpression(collectionAlias, collection.Database, collection.CollectionName, collection.ElementType), null), + Expression.Parameter(collection.ElementType, "document")); + } + + private Expression BuildPredicateEqual(IEnumerable source1, IEnumerable source2) + { + var en1 = source1.GetEnumerator(); + var en2 = source2.GetEnumerator(); + Expression result = null; + while (en1.MoveNext() && en2.MoveNext()) + { + Expression compare = Expression.Equal(en1.Current, en2.Current); + result = (result == null) ? compare : Expression.And(result, compare); + } + return result; + } + + private ProjectionExpression ConvertToSequence(Expression expression) + { + switch (expression.NodeType) + { + case (ExpressionType)MongoExpressionType.Projection: + return (ProjectionExpression)expression; + case ExpressionType.New: + var newExpression = (NewExpression)expression; + if (expression.Type.IsGenericType && expression.Type.GetGenericTypeDefinition() == typeof(Grouping<,>)) + return (ProjectionExpression)newExpression.Arguments[1]; + break; + } + + throw new NotSupportedException(string.Format("The expression of type '{0}' is not a sequence", expression.Type)); + } + + private bool IsOperationOnAField(MethodCallExpression m) + { + return _inField + || m.Arguments[0].NodeType == (ExpressionType)MongoExpressionType.Field + || (m.Arguments.Count == 2 && m.Arguments[1].NodeType == (ExpressionType)MongoExpressionType.Field); + } + + private ProjectionExpression VisitSequence(Expression source) + { + return ConvertToSequence(Visit(source)); + } + + internal static bool CanBeField(Expression expression) + { + switch (expression.NodeType) + { + case (ExpressionType)MongoExpressionType.Aggregate: + case (ExpressionType)MongoExpressionType.AggregateSubquery: + case (ExpressionType)MongoExpressionType.Field: + case (ExpressionType)MongoExpressionType.Scalar: + return true; + default: + return false; + } + } + + private static AggregateType GetAggregateType(string methodName) + { + switch (methodName) + { + case "Count": + return AggregateType.Count; + case "Sum": + return AggregateType.Sum; + case "Average": + return AggregateType.Average; + case "Min": + return AggregateType.Min; + case "Max": + return AggregateType.Max; + } + + throw new NotSupportedException(string.Format("Aggregate of type '{0}' is not supported.", methodName)); + } + + private static bool HasPredicateArgument(AggregateType aggregateType) + { + return aggregateType == AggregateType.Count; + } + + private static bool IsCollection(object value) + { + var q = value as IMongoQueryable; + return q != null && 
q.Expression.NodeType == ExpressionType.Constant; + } + + private static bool MembersMatch(MemberInfo a, MemberInfo b) + { + if (a == b) + return true; + if (a is MethodInfo && b is PropertyInfo) + return a == ((PropertyInfo)b).GetGetMethod(); + if (a is PropertyInfo && b is MethodInfo) + return ((PropertyInfo)a).GetGetMethod() == b; + return false; + } + + private static Expression StripQuotes(Expression e) + { + while (e.NodeType == ExpressionType.Quote) + e = ((UnaryExpression)e).Operand; + return e; + } + + private class GroupByInfo + { + public Alias Alias { get; private set; } + public Expression Element { get; private set; } + + public GroupByInfo(Alias alias, Expression element) + { + Alias = alias; + Element = element; + } + } + } +} diff --git a/source/MongoDB/Linq/Translators/QueryDuplicator.cs b/source/MongoDB/Linq/Translators/QueryDuplicator.cs new file mode 100644 index 00000000..b598cb90 --- /dev/null +++ b/source/MongoDB/Linq/Translators/QueryDuplicator.cs @@ -0,0 +1,41 @@ +using System.Collections.Generic; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class QueryDuplicator : MongoExpressionVisitor + { + Dictionary _map; + + public Expression Duplicate(Expression expression) + { + _map = new Dictionary(); + return Visit(expression); + } + + protected override Expression VisitCollection(CollectionExpression collection) + { + var newAlias = new Alias(); + _map[collection.Alias] = newAlias; + return new CollectionExpression(newAlias, collection.Database, collection.CollectionName, collection.DocumentType); + } + + protected override Expression VisitSelect(SelectExpression select) + { + var newAlias = new Alias(); + _map[select.Alias] = newAlias; + select = (SelectExpression)base.VisitSelect(select); + return new SelectExpression(newAlias, select.Fields, select.From, select.Where, select.OrderBy, select.GroupBy, select.IsDistinct, select.Skip, select.Take); + } + + protected override Expression VisitField(FieldExpression field) + { + Alias newAlias; + if (_map.TryGetValue(field.Alias, out newAlias)) + return new FieldExpression(field.Expression, newAlias, field.Name); + + return field; + } + } +} diff --git a/source/MongoDB/Linq/Translators/RedundantFieldRemover.cs b/source/MongoDB/Linq/Translators/RedundantFieldRemover.cs new file mode 100644 index 00000000..d4848195 --- /dev/null +++ b/source/MongoDB/Linq/Translators/RedundantFieldRemover.cs @@ -0,0 +1,76 @@ +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; +using System.Collections; + +namespace MongoDB.Linq.Translators +{ + internal class RedundantFieldRemover : MongoExpressionVisitor + { + private Dictionary _map; + + public Expression Remove(Expression expression) + { + _map = new Dictionary(); + return Visit(expression); + } + + protected override Expression VisitField(FieldExpression field) + { + FieldExpression mapped; + if (_map.TryGetValue(field, out mapped)) + return mapped; + return field; + } + + protected override Expression VisitSelect(SelectExpression select) + { + select = (SelectExpression)base.VisitSelect(select); + + var fields = select.Fields.OrderBy(f => f.Name).ToList(); + var removed = new BitArray(fields.Count); + var anyRemoved = false; + for (int i = 0, n = fields.Count; i < n; i++) + { + var fi = fields[i]; + var fxi = new FieldExpression(fi.Expression, select.Alias, fi.Name); + for (int j = i + 1; j < n; j++) + { + if (!removed.Get(i)) + { + FieldDeclaration 
fj = fields[j]; + if (AreSameExpression(fi.Expression, fj.Expression)) + { + var fxj = new FieldExpression(fj.Expression, select.Alias, fj.Name); + _map.Add(fxj, fxi); + removed.Set(j, true); + anyRemoved = true; + } + } + } + } + + if (anyRemoved) + { + var newFields = new List(); + for (int i = 0, n = fields.Count; i < n; i++) + { + if (!removed.Get(i)) + newFields.Add(fields[i]); + } + select = select.SetFields(newFields); + } + return select; + } + + private bool AreSameExpression(Expression a, Expression b) + { + if (a == b) + return true; + var fa = a as FieldExpression; + var fb = b as FieldExpression; + return fa != null && fb != null && fa.Alias == fb.Alias && fa.Name == fb.Name; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/RedundantSubqueryRemover.cs b/source/MongoDB/Linq/Translators/RedundantSubqueryRemover.cs new file mode 100644 index 00000000..7af69b17 --- /dev/null +++ b/source/MongoDB/Linq/Translators/RedundantSubqueryRemover.cs @@ -0,0 +1,126 @@ +using System.Linq; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class RedundantSubqueryRemover : MongoExpressionVisitor + { + private bool _isTopLevel; + + public Expression Remove(Expression expression) + { + _isTopLevel = true; + return Visit(expression); + } + + protected override Expression VisitSelect(SelectExpression select) + { + bool wasTopLevel = _isTopLevel; + _isTopLevel = false; + + select = (SelectExpression)base.VisitSelect(select); + + while (CanMergeWithFrom(select, wasTopLevel)) + { + var fromSelect = (SelectExpression)select.From; + + select = (SelectExpression)new SubqueryRemover().Remove(select, new[] { fromSelect }); + + var where = select.Where; + if(fromSelect.Where != null) + { + if (where != null) + where = Expression.And(fromSelect.Where, where); + else + where = fromSelect.Where; + } + + var groupBy = select.GroupBy ?? fromSelect.GroupBy; + var orderBy = select.OrderBy != null && select.OrderBy.Count > 0 ? select.OrderBy : fromSelect.OrderBy; + var skip = select.Skip ?? fromSelect.Skip; + var take = select.Take ?? fromSelect.Take; + bool distinct = select.IsDistinct | fromSelect.IsDistinct; + var fields = select.Fields.Count > 0 ? 
select.Fields : fromSelect.Fields; + + if (where != select.Where + || orderBy != select.OrderBy + || groupBy != select.GroupBy + || distinct != select.IsDistinct + || skip != select.Skip + || take != select.Take + || fields != select.Fields) + { + select = new SelectExpression(select.Alias, fields, select.From, where, orderBy, groupBy, distinct, skip, take); + } + } + + return select; + } + + private static bool CanMergeWithFrom(SelectExpression select, bool isTopLevel) + { + var fromSelect = select.From as SelectExpression; + if (fromSelect == null) + return false; + + var fromIsSimpleProjection = IsSimpleProjection(fromSelect); + var fromIsNameMapProjection = IsNameMapProjection(fromSelect); + if (!fromIsSimpleProjection && !fromIsNameMapProjection) + return false; + + var selectIsNameMapProjection = IsNameMapProjection(select); + var selectHasOrderBy = select.OrderBy != null && select.OrderBy.Count > 0; + var selectHasGroupBy = select.GroupBy != null; + var selectHasAggregates = new AggregateChecker().HasAggregates(select); + var fromHasOrderBy = fromSelect.OrderBy != null && fromSelect.OrderBy.Count > 0; + var fromHasGroupBy = fromSelect.GroupBy != null; + + if (selectHasOrderBy && fromHasOrderBy) + return false; + + if (selectHasGroupBy && fromHasGroupBy) + return false; + + if(fromHasOrderBy && (selectHasGroupBy || selectHasAggregates || select.IsDistinct)) + return false; + + if(fromHasGroupBy && select.Where != null) + return false; + + if(fromSelect.Take != null && (select.Take != null || select.Skip != null || select.IsDistinct || selectHasAggregates || selectHasGroupBy)) + return false; + + if(fromSelect.Skip != null && (select.Skip != null || select.IsDistinct || selectHasAggregates || selectHasGroupBy)) + return false; + + if (fromSelect.IsDistinct && (select.Take != null || select.Skip != null || !selectIsNameMapProjection || selectHasGroupBy || selectHasAggregates || (selectHasOrderBy && !isTopLevel))) + return false; + + return true; + } + + private static bool IsNameMapProjection(SelectExpression select) + { + var fromSelect = select.From as SelectExpression; + if (select.Fields.Count == 0) + return true; + + if (fromSelect == null || select.Fields.Count != fromSelect.Fields.Count) + return false; + + for (int i = 0, n = select.Fields.Count; i < n; i++) + { + if (select.Fields[i].Name != fromSelect.Fields[i].Name) + return false; + } + + return true; + } + + private static bool IsSimpleProjection(SelectExpression select) + { + return select.Fields.All(field => !string.IsNullOrEmpty(field.Name)); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Linq/Translators/SubqueryRemover.cs b/source/MongoDB/Linq/Translators/SubqueryRemover.cs new file mode 100644 index 00000000..1c0bc3da --- /dev/null +++ b/source/MongoDB/Linq/Translators/SubqueryRemover.cs @@ -0,0 +1,22 @@ +using System.Collections.Generic; +using System.Linq.Expressions; +using MongoDB.Linq.Expressions; + +namespace MongoDB.Linq.Translators +{ + internal class SubqueryRemover : MongoExpressionVisitor + { + private HashSet _selectsToRemove; + + public Expression Remove(SelectExpression outerSelect, IEnumerable selectsToRemove) + { + _selectsToRemove = new HashSet(selectsToRemove); + return Visit(outerSelect); + } + + protected override Expression VisitSelect(SelectExpression s) + { + return _selectsToRemove.Contains(s) ? 
Visit(s.From) : base.VisitSelect(s); + } + } +} diff --git a/source/MongoDB/LinqExtensions.cs b/source/MongoDB/LinqExtensions.cs new file mode 100644 index 00000000..e6b9a974 --- /dev/null +++ b/source/MongoDB/LinqExtensions.cs @@ -0,0 +1,185 @@ +using System; +using System.Linq; +using System.Linq.Expressions; +using MongoDB.Linq; + +namespace MongoDB +{ + /// + /// + /// + public static class LinqExtensions + { + /// + /// Counts the specified collection. + /// + /// + /// The collection. + /// The selector. + /// + public static int Count(this IMongoCollection collection, Expression> selector) where T : class + { + return collection.Linq().Count(selector); + } + + /// + /// Deletes the documents according to the selector. + /// + /// + /// The collection. + /// The selector. + [Obsolete("Use Remove instead")] + public static void Delete(this IMongoCollection collection, Expression> selector) where T : class + { + collection.Remove(GetQuery(collection, selector)); + } + + /// + /// Removes the specified collection. + /// + /// + /// The collection. + /// The selector. + public static void Remove(this IMongoCollection collection, Expression> selector) where T : class + { + collection.Remove(GetQuery(collection, selector)); + } + + /// + /// Finds the selectorified collection. + /// + /// + /// The collection. + /// The selector. + /// + public static ICursor Find(this IMongoCollection collection, Expression> selector) where T : class + { + return collection.Find(GetQuery(collection, selector)); + } + + /// + /// Finds the one. + /// + /// + /// The collection. + /// The selector. + /// + public static T FindOne(this IMongoCollection collection, Expression> selector) where T : class + { + return collection.FindOne(GetQuery(collection, selector)); + } + + /// + /// Linqs the selectorified collection. + /// + /// + /// The collection. + /// + public static IQueryable Linq(this IMongoCollection collection) where T : class + { + return new MongoQuery(new MongoQueryProvider(collection.Database, collection.Name)); + } + + /// + /// Linqs the selectorified collection. + /// + /// The collection. + /// + public static IQueryable Linq(this IMongoCollection collection) + { + return new MongoQuery(new MongoQueryProvider(collection.Database, collection.Name)); + } + + /// + /// Updates the selectorified collection. + /// + /// + /// The collection. + /// The document. + /// The selector. + public static void Update(this IMongoCollection collection, object document, Expression> selector) where T : class + { + collection.Update(document, GetQuery(collection, selector)); + } + + /// + /// Updates the selectorified collection. + /// + /// + /// The collection. + /// The document. + /// The selector. + /// if set to true [safe mode]. + public static void Update(this IMongoCollection collection, object document, Expression> selector, bool safeMode) where T : class + { + collection.Update(document, GetQuery(collection, selector), safeMode); + } + + /// + /// Updates the selectorified collection. + /// + /// + /// The collection. + /// The document. + /// The selector. + /// The flags. + public static void Update(this IMongoCollection collection, object document, Expression> selector, UpdateFlags flags) where T : class + { + collection.Update(document, GetQuery(collection, selector), flags); + } + + /// + /// Updates the selectorified collection. + /// + /// + /// The collection. + /// The document. + /// The selector. + /// The flags. + /// if set to true [safe mode]. 
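For illustration, a minimal usage sketch of these extension methods; the Person class, field names and values below are hypothetical, and the grouped query is only meant to show the kind of expression the map/reduce translators above are built for:

    using System.Linq;
    using MongoDB;

    public class Person
    {
        public string Name { get; set; }
        public int Age { get; set; }
    }

    public static class LinqExtensionsUsage
    {
        public static void Run(IMongoCollection<Person> people)
        {
            // Expression-based helpers translate the lambda into a query Document.
            var adults = people.Find(p => p.Age >= 18);
            var bob = people.FindOne(p => p.Name == "Bob");
            var over30 = people.Count(p => p.Age > 30);

            // Full LINQ queries run through the MongoQueryProvider.
            var names = people.Linq()
                .Where(p => p.Age > 21)
                .OrderBy(p => p.Name)
                .Select(p => p.Name)
                .ToList();

            // A grouped aggregate like this is what gets turned into a map/reduce job
            // by MapReduceMapFunctionBuilder and MapReduceReduceFunctionBuilder.
            var perName = people.Linq()
                .GroupBy(p => p.Name)
                .Select(g => new { Name = g.Key, Count = g.Count() })
                .ToList();

            // Updates can be driven by a selector expression as well (safe mode enabled).
            people.UpdateAll(new Document("$set", new Document("Age", 21)), p => p.Name == "Bob", true);
        }
    }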
+ public static void Update(this IMongoCollection collection, object document, Expression> selector, UpdateFlags flags, bool safeMode) where T : class + { + collection.Update(document, GetQuery(collection, selector), flags, safeMode); + } + + /// + /// Updates all. + /// + /// + /// The collection. + /// The document. + /// The selector. + public static void UpdateAll(this IMongoCollection collection, object document, Expression> selector) where T : class + { + collection.UpdateAll(document, GetQuery(collection, selector)); + } + + /// + /// Updates all. + /// + /// + /// The collection. + /// The document. + /// The selector. + /// if set to true [safe mode]. + public static void UpdateAll(this IMongoCollection collection, object document, Expression> selector, bool safeMode) where T : class + { + collection.UpdateAll(document, GetQuery(collection, selector), safeMode); + } + + /// + /// Gets the query. + /// + /// + /// The collection. + /// The selector. + /// + private static Document GetQuery(IMongoCollection collection, Expression> selector) where T : class + { + var query = new MongoQuery(new MongoQueryProvider(collection.Database, collection.Name)) + .Where(selector); + var queryObject = ((IMongoQueryable)query).GetQueryObject(); + return queryObject.Query; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/MapReduce.cs b/source/MongoDB/MapReduce.cs new file mode 100644 index 00000000..ec661021 --- /dev/null +++ b/source/MongoDB/MapReduce.cs @@ -0,0 +1,261 @@ +using System; +using System.Collections.Generic; +using MongoDB.Commands; +using MongoDB.Results; + +namespace MongoDB +{ + /// + /// Provides a Fluent interface to build and execute Map/Reduce calls. + /// + public class MapReduce : IDisposable + { + private readonly IMongoDatabase _database; + private readonly Type _rootType; + private bool _disposing; + + /// + /// Initializes a new instance of the class. + /// + /// The database. + /// The name. + /// Type of the root. + public MapReduce(IMongoDatabase database, string name, Type rootType) + { + IsModifiable = true; + if(database == null) + throw new ArgumentNullException("database"); + if(name == null) + throw new ArgumentNullException("name"); + if(rootType == null) + throw new ArgumentNullException("rootType"); + + _rootType = rootType; + _database = database; + Command = new MapReduceCommand(name); + } + + /// + /// Gets a value indicating whether this instance is modifiable. + /// + /// + /// true if this instance is modifiable; otherwise, false. + /// + public bool IsModifiable { get; private set; } + + /// + /// Gets the result. + /// + /// The result. + internal MapReduceResult Result { get; private set; } + + /// + /// Gets the command. + /// + /// The command. + public MapReduceCommand Command { get; private set; } + + /// + /// Gets the documents. + /// + /// The documents. + public IEnumerable Documents + { + get + { + if(Result == null) + RetrieveData(); + if(Result == null || Result.Ok == false) + throw new InvalidOperationException("Documents cannot be iterated when an error was returned from execute."); + + var docs = _database.GetCollection(Result.CollectionName).FindAll().Documents; + using((IDisposable)docs) + { + foreach(var doc in docs) + yield return doc; + } + } + } + + /// + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. 
+ /// + public void Dispose() + { + if(Command.KeepTemp || Command.Out != null || _disposing) + return; + + _disposing = true; + + if(Result == null || Result.Ok == false) + return; //Nothing to do. + + //Drop the temporary collection that was created as part of results. + _database.Metadata.DropCollection(Result.CollectionName); + } + + /// + /// The map function references the variable this to inspect the current object under consideration. + /// A map function must call emit(key,value) at least once, but may be invoked any number of times, + /// as may be appropriate. + /// + public MapReduce Map(string function) + { + return Map(new Code(function)); + } + + /// + /// The map function references the variable this to inspect the current object under consideration. + /// A map function must call emit(key,value) at least once, but may be invoked any number of times, + /// as may be appropriate. + /// + public MapReduce Map(Code function) + { + TryModify(); + Command.Map = function; + return this; + } + + /// + /// The reduce function receives a key and an array of values. To use, reduce the received values, + /// and return a result. + /// + /// + /// The MapReduce engine may invoke reduce functions iteratively; thus, these functions + /// must be idempotent. If you need to perform an operation only once, use a finalize function. + /// + public MapReduce Reduce(string function) + { + return Reduce(new Code(function)); + } + + /// + /// The reduce function receives a key and an array of values. To use, reduce the received values, + /// and return a result. + /// + /// + /// The MapReduce engine may invoke reduce functions iteratively; thus, these functions + /// must be idempotent. If you need to perform an operation only once, use a finalize function. + /// + public MapReduce Reduce(Code function) + { + TryModify(); + Command.Reduce = function; + return this; + } + + /// + /// Query filter object + /// + public MapReduce Query(Document query) + { + TryModify(); + Command.Query = query; + return this; + } + + /// + /// Sort the query. Useful for optimization + /// + public MapReduce Sort(Document sort) + { + TryModify(); + Command.Sort = sort; + return this; + } + + /// + /// Number of objects to return from collection + /// + public MapReduce Limit(long limit) + { + TryModify(); + Command.Limit = limit; + return this; + } + + /// + /// Name of the final collection the results should be stored in. + /// + /// + /// A temporary collection is still used and then renamed to the target name atomically. + /// + public MapReduce Out(String name) + { + TryModify(); + Command.Out = name; + return this; + } + + /// + /// When true the generated collection is not treated as temporary. Specifying out automatically makes + /// the collection permanent + /// + public MapReduce KeepTemp(bool keep) + { + TryModify(); + Command.KeepTemp = keep; + return this; + } + + /// + /// Provides statistics on job execution time. + /// + public MapReduce Verbose(bool val) + { + TryModify(); + Command.Verbose = val; + return this; + } + + /// + /// Function to apply to all the results when finished. + /// + public MapReduce Finalize(Code function) + { + TryModify(); + Command.Finalize = function; + return this; + } + + /// + /// Document where fields go into javascript global scope + /// + public MapReduce Scope(Document scope) + { + TryModify(); + Command.Scope = scope; + return this; + } + + /// + /// Retrieves the data. 
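A minimal sketch of driving the fluent interface above; the collection name, the javascript bodies and the use of typeof(Document) as the root type are illustrative only:

    using MongoDB;

    public static class MapReduceUsage
    {
        public static void Run(IMongoDatabase db)
        {
            // Count posts per author in a hypothetical "posts" collection.
            using (var mapReduce = new MapReduce(db, "posts", typeof(Document)))
            {
                mapReduce
                    .Map("function() { emit(this.Author, 1); }")
                    .Reduce("function(key, values) { var sum = 0; values.forEach(function(v) { sum += v; }); return sum; }")
                    .Query(new Document("Published", true));

                foreach (var doc in mapReduce.Documents)
                {
                    // Each result document carries the emitted key as _id and the reduced value.
                }
            }
            // Dispose drops the temporary result collection unless KeepTemp(true) or Out(...) was used.
        }
    }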
+ /// + internal void RetrieveData() + { + if(Command.Command.ContainsKey("map") == false || Command.Command.ContainsKey("reduce") == false) + throw new InvalidOperationException("Cannot execute without a map and reduce function"); + + IsModifiable = false; + + try + { + Result = new MapReduceResult(_database.SendCommand(_rootType, Command.Command)); + } + catch(MongoCommandException exception) + { + Result = new MapReduceResult(exception.Error); + throw new MongoMapReduceException(exception); + } + } + + /// + /// Tries the modify. + /// + private void TryModify() + { + if(IsModifiable == false) + throw new InvalidOperationException("Cannot modify a map/reduce that has already executed"); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Mo.cs b/source/MongoDB/Mo.cs new file mode 100644 index 00000000..333b029b --- /dev/null +++ b/source/MongoDB/Mo.cs @@ -0,0 +1,210 @@ +using System.Collections.Generic; + +namespace MongoDB +{ + /// + /// Staticly typed way of using MongoDB update modifiers. + /// + public class Mo : Document + { + /// + /// Initializes a new instance of the class. + /// + private Mo() + { } + + /// + /// Initializes a new instance of the class. + /// + /// The name. + /// The value. + private Mo(string name,object value){ + Add(name, value); + } + + /// + /// Increments field by the number value. If field is present in the object, + /// otherwise sets field to the number value. + /// + /// The field. + /// The value. + /// + public static Mo Inc(string field,object value){ + return new Mo("$inc", new Document(field, value)); + } + + /// + /// Sets field to value. + /// + /// The field. + /// The value. + /// + /// + /// All datatypes are supported with $set. + /// + public static new Mo Set(string field, object value){ + return new Mo("$set", new Document(field, value)); + } + + /// + /// Deletes a given field. + /// + /// The field. + /// + /// + /// Supported version in MongoDB 1.3 and up. + /// + public static Mo Unset(string field) + { + return new Mo("$unset", new Document(field, 1)); + } + + /// + /// Deletes the given fields. + /// + /// The fields. + /// + /// + /// Supported version in MongoDB 1.3 and up. + /// + public static Mo Unset(IEnumerable fields) + { + var document = new Document(); + + foreach(var field in fields) + document.Add(field, 1); + + return new Mo("$unset", document); + } + + /// + /// Appends value to field, if field is an existing array. + /// Otherwise sets field to the array and add value if field is not present. + /// If field is present but is not an array, an error condition is raised. + /// + /// The field. + /// The value. + /// + public static Mo Push(string field,object value){ + return new Mo("$push", new Document(field, value)); + } + + /// + /// Appends each value in values to field, + /// if field is an existing array. + /// Otherwise sets field to the array values if field is not present. + /// If field is present but is not an array, an error + /// condition is raised. + /// + /// The field. + /// The values. + /// + public static Mo PushAll(string field, IEnumerable values){ + return new Mo("$pushAll", new Document(field, values)); + } + + /// + /// Adds value to the array only if its not in the array already. + /// + /// The field. + /// The value. + /// + public static Mo AddToSet(string field, object value){ + return new Mo("$addToSet", new Document(field, value)); + } + + /// + /// Adds values to the array only if its not in the array already. + /// + /// The field. + /// The values. 
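These modifiers compose with the & operator into a single update document; the field names, values and the collection below are hypothetical:

    using MongoDB;

    public static class MoUsage
    {
        public static void Run(IMongoCollection<Document> posts)
        {
            // Builds { "$inc": { "Views": 1 }, "$push": { "Tags": "mongodb" } } in one document.
            var modifier = Mo.Inc("Views", 1) & Mo.Push("Tags", "mongodb");

            // Apply it to every document matching the selector.
            posts.UpdateAll(modifier, new Document("Author", "Bob"));
        }
    }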
+ /// + public static Mo AddToSet(string field, IEnumerable values){ + return new Mo("$addToSet", new Document(field, new Document("$each", values))); + } + + /// + /// Removes the first element in an array. + /// + /// The field. + /// + /// + /// Supported in MongoDB 1.1 and up. + /// + public static Mo PopFirst(string field){ + return new Mo("$pop", new Document(field, -1)); + } + + /// + /// Removes the last element in an array. + /// + /// The field. + /// + /// + /// Supported in MongoDB 1.1 and up. + /// + public static Mo PopLast(string field) + { + return new Mo("$pop", new Document(field, 1)); + } + + /// + /// Removes all occurrences of value from field, if field is an array. + /// If field is present but is not an array, an error condition is raised. + /// + /// The field. + /// The value. + /// + public static Mo Pull(string field, object value){ + return new Mo("$pull", new Document(field, value)); + } + + /// + /// Removes all occurrences of each value in values from field, + /// if field is an array. + /// If field is present but is not an array, an error condition is raised. + /// + /// The field. + /// The values. + /// + public static Mo PullAll(string field, IEnumerable values){ + return new Mo("$pullAll", new Document(field, values)); + } + + /// + /// Implements the operator &. This is used for conjunctions. + /// + /// The modifier1. + /// The modifier2. + /// The result of the modifier. + public static Mo operator &(Mo modifier1, Mo modifier2){ + var mo = new Mo(); + + //Todo: move as DeepMerge to Document + + foreach(var key in modifier1.Keys) + mo[key] = modifier1[key]; + + foreach(var pair2 in modifier2) + { + object value1; + if(mo.TryGetValue(pair2.Key, out value1)) + { + if(pair2.Value is Document && value1 is Document) + { + mo[pair2.Key] = new Document() + .Merge((Document)value1) + .Merge((Document)pair2.Value); + } + else + mo[pair2.Key] = pair2.Value; + + } + else + mo.Add(pair2.Key, pair2.Value); + } + + return mo; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Mongo.cs b/source/MongoDB/Mongo.cs new file mode 100644 index 00000000..56b52bbf --- /dev/null +++ b/source/MongoDB/Mongo.cs @@ -0,0 +1,135 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Configuration; +using MongoDB.Connections; + +namespace MongoDB +{ + /// + /// Description of Mongo. + /// + public class Mongo : IDisposable, IMongo + { + private readonly MongoConfiguration _configuration; + private readonly Connection _connection; + + /// + /// Initializes a new instance of the class. + /// + public Mongo() + : this(new MongoConfiguration()) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The connection string. + public Mongo(string connectionString) + : this(new MongoConfiguration {ConnectionString = connectionString}) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The mongo configuration. + public Mongo(MongoConfiguration configuration){ + if(configuration == null) + throw new ArgumentNullException("configuration"); + + configuration.ValidateAndSeal(); + + _configuration = configuration; + _connection = ConnectionFactoryFactory.GetConnection(configuration.ConnectionString); + } + + /// + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + /// + public void Dispose() + { + _connection.Dispose(); + } + + /// + /// Gets the connection string. + /// + /// The connection string. 
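As a usage sketch for the Mo modifier builder introduced in Mo.cs above (the users collection and the field names are assumptions), several modifiers can be combined with the & operator and handed to an update call:

    // Builds { "$inc": { "loginCount": 1 }, "$set": { "lastLogin": <now> }, "$push": { "history": "login" } }
    var modifier = Mo.Inc("loginCount", 1)
                 & Mo.Set("lastLogin", DateTime.UtcNow)
                 & Mo.Push("history", "login");

    users.UpdateAll(modifier, new Document("active", true));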
+ public string ConnectionString + { + get { return _connection.ConnectionString; } + } + + /// + /// Gets the named database. + /// + /// The name. + /// + public IMongoDatabase GetDatabase(String name) + { + return new MongoDatabase(_configuration, _connection, name); + } + + /// + /// Gets the with the specified name. + /// + /// + public IMongoDatabase this[String name] + { + get { return GetDatabase(name); } + } + + /// + /// Connects to server. + /// + /// + /// Thrown when connection fails. + public void Connect() + { + _connection.Open(); + } + + /// + /// Tries to connect to server. + /// + /// + public bool TryConnect() + { + try + { + _connection.Open(); + return _connection.IsConnected; + } + catch(MongoException) + { + return _connection.IsConnected; + } + } + + /// + /// Disconnects this instance. + /// + /// + public bool Disconnect() + { + _connection.Close(); + return _connection.IsConnected; + } + + /// + /// Gets the databases. + /// + /// + public IEnumerable GetDatabases() + { + var result = _connection.SendCommand(_configuration.SerializationFactory, "admin", typeof(Document), new Document("listDatabases", 1)); + + return ((IEnumerable)result["databases"]) + .Select(database => (string)database["name"]) + .Select(name => new MongoDatabase(_configuration, _connection, name)) + .Cast(); + } + } +} diff --git a/source/MongoDB/MongoCollection_1.cs b/source/MongoDB/MongoCollection_1.cs new file mode 100644 index 00000000..c8976cb7 --- /dev/null +++ b/source/MongoDB/MongoCollection_1.cs @@ -0,0 +1,632 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using MongoDB.Configuration; +using MongoDB.Connections; +using MongoDB.Protocol; +using MongoDB.Results; +using MongoDB.Util; + +namespace MongoDB +{ + /// + /// + /// + public class MongoCollection : IMongoCollection where T : class + { + private readonly MongoConfiguration _configuration; + private readonly Connection _connection; + private MongoDatabase _database; + private CollectionMetadata _metadata; + + /// + /// Initializes a new instance of the class. + /// + /// The configuration. + /// The connection. + /// Name of the database. + /// The name. + internal MongoCollection(MongoConfiguration configuration, Connection connection, string databaseName, string collectionName) + { + //Todo: add public constructors for users to call + Name = collectionName; + DatabaseName = databaseName; + _configuration = configuration; + _connection = connection; + } + + /// + /// Gets the database. + /// + /// The database. + public IMongoDatabase Database { + get { return _database ?? (_database = new MongoDatabase(_configuration, _connection, DatabaseName)); } + } + + /// + /// Gets or sets the name. + /// + /// The name. + public string Name { get; private set; } + + /// + /// Gets or sets the name of the database. + /// + /// The name of the database. + public string DatabaseName { get; private set; } + + /// + /// Gets the full name including database name. + /// + /// The full name. + public string FullName { + get { return DatabaseName + "." + Name; } + } + + /// + /// Gets the meta data. + /// + /// The meta data. + public CollectionMetadata Metadata { + get { return _metadata ?? (_metadata = new CollectionMetadata(_configuration, DatabaseName, Name, _connection)); } + } + + /// + /// Finds and returns the first document in a selector query. + /// + /// The where. + /// + /// A from the collection. 
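A short sketch of the connection entry point defined in Mongo.cs above; the server address and database name are placeholders:

    using (var mongo = new Mongo("Server=localhost:27017"))
    {
        mongo.Connect();                          // throws on failure; TryConnect() returns a bool instead
        var database = mongo.GetDatabase("blog"); // equivalent to mongo["blog"]
        // ... work with the database ...
    }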
+ /// + public T FindOne(string javascriptWhere) + { + var spec = new Document { { "$where", new Code(javascriptWhere) } }; + using(var cursor = Find(spec, -1, 0, null)) + return cursor.Documents.FirstOrDefault(); + } + + /// + /// Finds and returns the first document in a query. + /// + /// A representing the query. + /// + /// A from the collection. + /// + public T FindOne(object spec){ + using(var cursor = Find(spec, -1, 0, null)) + return cursor.Documents.FirstOrDefault(); + } + + /// + /// Finds all. + /// + /// + public ICursor FindAll(){ + var spec = new Document(); + return Find(spec, 0, 0, null); + } + + /// + /// Finds the specified where. + /// + /// The where. + /// + public ICursor Find(string javascriptWhere){ + var spec = new Document { { "$where", new Code(javascriptWhere) } }; + return Find(spec, 0, 0, null); + } + + /// + /// Finds the specified spec. + /// + /// The spec. + /// + public ICursor Find(object spec){ + return Find(spec, 0, 0, null); + } + + /// + /// Finds the specified spec. + /// + /// The spec. + /// + /// A + public ICursor Find(object spec, object fields){ + return Find(spec, 0, 0, fields); + } + + /// + /// Finds the specified spec. + /// + /// The spec. + /// The limit. + /// The skip. + /// + public ICursor Find(object spec, int limit, int skip){ + return Find(spec, limit, skip, null); + } + + /// + /// Finds the specified spec. + /// + /// The spec. + /// The limit. + /// The skip. + /// The fields. + /// + public ICursor Find(object spec, int limit, int skip, object fields){ + if (spec == null) + spec = new Document(); + return new Cursor(_configuration.SerializationFactory, _configuration.MappingStore, _connection, DatabaseName, Name, spec, limit, skip, fields); + } + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// to find the document. + /// A + public T FindAndModify(object document, object spec){ + return FindAndModify(document, spec, false); + } + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// to find the document. + /// containing the names of columns to sort on with the values being the + /// A + /// + public T FindAndModify(object document, object spec, object sort) + { + return FindAndModify(document, spec, sort, null, false, false, false); + } + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// to find the document. + /// if set to true [return new]. + /// A + public T FindAndModify(object document, object spec, bool returnNew) + { + return FindAndModify(document, spec, null, null, false, returnNew, false); + } + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// to find the document. + /// containing the names of columns to sort on with the values being the + /// + /// if set to true [return new]. + /// A + public T FindAndModify(object document, object spec, object sort, bool returnNew) + { + return FindAndModify(document, spec, sort, null, false, returnNew, false); + } + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. 
+ /// + /// The document. + /// to find the document. + /// containing the names of columns to sort on with the values being the + /// + /// The fields. + /// if set to true [remove]. + /// if set to true [return new]. + /// if set to true [upsert]. + /// A + public T FindAndModify(object document, object spec, object sort, object fields, bool remove, bool returnNew, bool upsert) + { + try + { + var command = new Document + { + {"findandmodify", Name}, + {"query", spec}, + {"update", EnsureUpdateDocument(document)}, + {"new", returnNew}, + {"remove", remove}, + {"upsert", upsert} + }; + + if(sort != null) + command.Add("sort", sort); + if(fields != null) + command.Add("fields", fields); + + var response = _connection.SendCommand>(_configuration.SerializationFactory, + DatabaseName, + typeof(T), + command); + + return response.Value; + } + catch(MongoCommandException) + { + // This is when there is no document to operate on + return null; + } + } + + /// + /// Entrypoint into executing a map/reduce query against the collection. + /// + /// A + public MapReduce MapReduce(){ + return new MapReduce(Database, Name, typeof(T)); + } + + /// + /// Count all items in the collection. + /// + public long Count(){ + return Count(new Document()); + } + + /// + /// Count all items in a collection that match the query spec. + /// + /// The spec. + /// + /// + /// It will return 0 if the collection doesn't exist yet. + /// + public long Count(object spec){ + try { + var response = Database.SendCommand(typeof(T),new Document().Add("count", Name).Add("query", spec)); + return Convert.ToInt64((double)response["n"]); + } catch (MongoCommandException) { + //FIXME This is an exception condition when the namespace is missing. + //-1 might be better here but the console returns 0. + return 0; + } + } + + /// + /// Inserts the Document into the collection. + /// + public void Insert(object document, bool safemode){ + Insert(document); + CheckError(safemode); + } + + /// + /// Inserts the specified doc. + /// + /// The doc. + public void Insert(object document){ + Insert(new[] { document }); + } + + /// + /// Inserts all. + /// + /// The type of the element. + /// The documents. + /// if set to true [safemode]. + public void Insert(IEnumerable documents, bool safemode){ + if (safemode) + Database.ResetError(); + Insert(documents); + CheckPreviousError(safemode); + } + + /// + /// Inserts the specified documents. + /// + /// The documents. + public void Insert(IEnumerable documents){ + if(documents is Document) + { + Insert(new[]{(Document)documents}); + return; + } + + var rootType = typeof(T); + var writerSettings = _configuration.SerializationFactory.GetBsonWriterSettings(rootType); + + var insertMessage = new InsertMessage(writerSettings) + { + FullCollectionName = FullName + }; + + var descriptor = _configuration.SerializationFactory.GetObjectDescriptor(rootType); + var insertDocument = new List(); + + foreach (var document in documents) { + var id = descriptor.GetPropertyValue(document, "_id"); + + if (id == null) + descriptor.SetPropertyValue(document, "_id", descriptor.GenerateId(document)); + + insertDocument.Add(document); + } + + insertMessage.Documents = insertDocument.ToArray(); + + try { + _connection.SendMessage(insertMessage,DatabaseName); + } catch (IOException exception) { + throw new MongoConnectionException("Could not insert document, communication failure", _connection, exception); + } + } + + /// + /// Deletes documents from the collection according to the spec. + /// + /// The selector. 
+ /// if set to true [safemode]. + /// + /// An empty document will match all documents in the collection and effectively truncate it. + /// + [Obsolete("Use Remove instead")] + public void Delete(object selector, bool safemode) + { + Delete(selector); + CheckError(safemode); + } + + /// + /// Remove documents from the collection according to the selector. + /// + /// The selector. + /// if set to true [safemode]. + /// + /// An empty document will match all documents in the collection and effectively truncate it. + /// See the safemode description in the class description + /// + public void Remove(object selector, bool safemode){ + Remove(selector); + CheckError(safemode); + } + + /// + /// Deletes documents from the collection according to the spec. + /// + /// The selector. + /// + /// An empty document will match all documents in the collection and effectively truncate it. + /// + [Obsolete("Use Remove instead")] + public void Delete(object selector){ + var writerSettings = _configuration.SerializationFactory.GetBsonWriterSettings(typeof(T)); + + try { + _connection.SendMessage(new DeleteMessage(writerSettings) + { + FullCollectionName = FullName, + Selector = selector + },DatabaseName); + } catch (IOException exception) { + throw new MongoConnectionException("Could not delete document, communication failure", _connection, exception); + } + } + + /// + /// Remove documents from the collection according to the selector. + /// + /// The selector. + /// + /// An empty document will match all documents in the collection and effectively truncate it. + /// + public void Remove(object selector){ + var writerSettings = _configuration.SerializationFactory.GetBsonWriterSettings(typeof(T)); + + try + { + _connection.SendMessage(new DeleteMessage(writerSettings) + { + FullCollectionName = FullName, + Selector = selector + }, DatabaseName); + } + catch(IOException exception) + { + throw new MongoConnectionException("Could not delete document, communication failure", _connection, exception); + } + } + + /// + /// Updates the specified document. + /// + /// The document. + /// if set to true [safemode]. + [Obsolete("Use Save instead")] + public void Update(object document, bool safemode) + { + Save(document, safemode); + } + + /// + /// Updates a document with the data in doc as found by the selector. + /// + /// The document. + /// + /// _id will be used in the document to create a selector. If it isn't in + /// the document then it is assumed that the document is new and an upsert is sent to the database + /// instead. + /// + [Obsolete("Use Save(Document)")] + public void Update(object document){ + Save(document); + } + + /// + /// Updates the specified document. + /// + /// The document. + /// The selector. + /// if set to true [safemode]. + public void Update(object document, object selector, bool safemode){ + Update(document, selector, 0, safemode); + } + + /// + /// Updates a document with the data in doc as found by the selector. + /// + /// The document. + /// The selector. + public void Update(object document, object selector){ + Update(document, selector, 0); + } + + /// + /// Updates the specified document. + /// + /// The document. + /// The selector. + /// The flags. + /// if set to true [safemode]. + public void Update(object document, object selector, UpdateFlags flags, bool safemode){ + Update(document, selector, flags); + CheckError(safemode); + } + + /// + /// Updates a document with the data in doc as found by the selector. 
+ /// + /// The to update with + /// The query spec to find the document to update. + /// + public void Update(object document, object selector, UpdateFlags flags){ + var writerSettings = _configuration.SerializationFactory.GetBsonWriterSettings(typeof(T)); + + try { + _connection.SendMessage(new UpdateMessage(writerSettings) + { + FullCollectionName = FullName, + Selector = selector, + Document = document, + Flags = (int)flags + }, DatabaseName); + } catch (IOException exception) { + throw new MongoConnectionException("Could not update document, communication failure", _connection, exception); + } + } + + /// + /// Runs a multiple update query against the database. It will wrap any + /// doc with $set if the passed in doc doesn't contain any '$' ops. + /// + /// The document. + /// The selector. + public void UpdateAll(object document, object selector){ + Update(EnsureUpdateDocument(document), selector, UpdateFlags.MultiUpdate); + } + + /// + /// Updates all. + /// + /// The document. + /// The selector. + /// if set to true [safemode]. + public void UpdateAll(object document, object selector, bool safemode){ + if (safemode) + Database.ResetError(); + UpdateAll(document, selector); + CheckPreviousError(safemode); + } + + /// + /// Saves a document to the database using an upsert. + /// + /// The document. + /// + /// The document will contain the _id that is saved to the database. This is really just an alias + /// to Update(Document) to maintain consistency between drivers. + /// + public void Save(object document){ + //Try to generate a selector using _id for an existing document. + //otherwise just set the upsert flag to 1 to insert and send onward. + + var descriptor = _configuration.SerializationFactory.GetObjectDescriptor(typeof(T)); + + var value = descriptor.GetPropertyValue(document, "_id"); + + if(value == null) + { + //Likely a new document + descriptor.SetPropertyValue(document, "_id", descriptor.GenerateId(value)); + + Insert(document); + } + else + Update(document, new Document("_id", value), UpdateFlags.Upsert); + } + + /// + /// Saves a document to the database using an upsert. + /// + /// The document. + /// if set to true [safemode]. + /// + /// The document will contain the _id that is saved to the database. This is really just an alias + /// to Update(Document) to maintain consistency between drivers. + /// + public void Save(object document, bool safemode) + { + Save(document); + CheckError(safemode); + } + + /// + /// Checks the error. + /// + /// if set to true [safemode]. + private void CheckError(bool safemode){ + if (!safemode) + return; + + var lastError = Database.GetLastError(); + + if (ErrorTranslator.IsError(lastError)) + throw ErrorTranslator.Translate(lastError); + } + + /// + /// Checks the previous error. + /// + /// if set to true [safemode]. + private void CheckPreviousError(bool safemode){ + if (!safemode) + return; + + var previousError = Database.GetPreviousError(); + + if (ErrorTranslator.IsError(previousError)) + throw ErrorTranslator.Translate(previousError); + } + + /// + /// Ensures the update document. + /// + /// The document. + /// + private object EnsureUpdateDocument(object document) + { + var descriptor = _configuration.SerializationFactory.GetObjectDescriptor(typeof(T)); + + var foundOp = descriptor.GetMongoPropertyNames(document) + .Any(name => name.IndexOf('$') == 0); + + if(foundOp == false) + { + //wrap document in a $set. 
+ return new Document().Add("$set", document); + } + + return document; + } + } +} diff --git a/source/MongoDB/MongoConnectionStringBuilder.cs b/source/MongoDB/MongoConnectionStringBuilder.cs new file mode 100644 index 00000000..77424192 --- /dev/null +++ b/source/MongoDB/MongoConnectionStringBuilder.cs @@ -0,0 +1,433 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Text.RegularExpressions; + +namespace MongoDB +{ + /// + /// + [Serializable] + public class MongoConnectionStringBuilder + { + /// + /// + public const string DefaultDatabase = "admin"; + + /// + /// + public const int DefaultMaximumPoolSize = 100; + + /// + /// + public const int DefaultMinimumPoolSize = 0; + + /// + /// + public const bool DefaultPooled = true; + + /// + /// + public static readonly TimeSpan DefaultConnectionLifeTime = TimeSpan.Zero; + + /// + /// + public static readonly TimeSpan DefaultConnectionTimeout = TimeSpan.FromSeconds(15); + + private static readonly Regex PairRegex = new Regex(@"^\s*(.*)\s*=\s*(.*)\s*$"); + private static readonly Regex ServerRegex = new Regex(@"^\s*([^:]+)(?::(\d+))?\s*$"); + private static readonly Regex UriRegex = new Regex(@"^mongodb://(?:([^:]*):([^@]*)@)?([^/]*)(?:/([^?]*))?(?:\?(.*))?$"); + + private readonly List _servers = new List(); + + /// + /// Initializes a new instance of the + /// + /// class. Uses the default server connection when + /// no server is added. + /// + public MongoConnectionStringBuilder() + { + ConnectionLifetime = DefaultConnectionLifeTime; + ConnectionTimeout = DefaultConnectionTimeout; + MaximumPoolSize = DefaultMaximumPoolSize; + MinimumPoolSize = DefaultMinimumPoolSize; + Pooled = DefaultPooled; + Database = DefaultDatabase; + } + + /// + /// Initializes a new instance of the + /// + /// class. Uses the default server connection when + /// no server is added. + /// + /// The connection string. + public MongoConnectionStringBuilder(string connectionString) + : this() + { + if(!string.IsNullOrEmpty(connectionString)) + if(connectionString.StartsWith("mongodb://")) + ParseUri(connectionString); + else + Parse(connectionString); + } + + /// + /// Gets the servers. + /// + /// The servers. + public MongoServerEndPoint[] Servers + { + get { return _servers.Count == 0 ? new[] {MongoServerEndPoint.Default} : _servers.ToArray(); } + } + + /// + /// Gets or sets the password. + /// + /// The password. + public string Password { get; set; } + + /// + /// Gets or sets the username. + /// + /// The username. + public string Username { get; set; } + + /// + /// Gets or sets the maximum size of the connection pool. + /// + /// The maximum size of the pool. + public int MaximumPoolSize { get; set; } + + /// + /// Gets or sets the size of the minimum connection pool. + /// + /// The size of the minimal pool. + public int MinimumPoolSize { get; set; } + + /// + /// Gets or sets the connection lifetime in connection pool. + /// + /// The connection lifetime. + public TimeSpan ConnectionLifetime { get; set; } + + /// + /// Gets or sets the connection timeout. + /// + /// The connection timeout. + public TimeSpan ConnectionTimeout { get; set; } + + /// + /// Gets or sets a value indicating whether connection is pooled. + /// + /// true if pooled; otherwise, false. + public bool Pooled { get; set; } + + /// + /// Gets or sets the database. + /// + /// + /// Is only used when passing directly constructing MongoDatabase instance. + /// + /// The database. 
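Putting the collection API above together, a hedged sketch of the typical insert/find/update cycle; the collection and field names are invented for illustration:

    var posts = database["posts"];

    posts.Insert(new Document { { "title", "Hello" }, { "tags", new[] { "mongodb", "csharp" } } });

    var post = posts.FindOne(new Document("title", "Hello"));

    // UpdateAll wraps the document in $set when it contains no $-operator (see EnsureUpdateDocument above).
    posts.UpdateAll(new Document("published", true), new Document("title", "Hello"));

    using (var cursor = posts.Find(new Document(), 10, 0))
    {
        foreach (var document in cursor.Documents)
        {
            // consume each of the first ten documents
        }
    }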
+ public string Database { get; set; } + + /// + /// Parses the URI. + /// + /// The connection string. + private void ParseUri(string connectionString) + { + if(connectionString == null) + throw new ArgumentNullException("connectionString"); + + var uriMatch = UriRegex.Match(connectionString); + + if(!uriMatch.Success) + throw new FormatException(string.Format("Invalid connection string: {0}", connectionString)); + + var username = uriMatch.Groups[1].Value; + if(!string.IsNullOrEmpty(username)) + Username = username; + + var password = uriMatch.Groups[2].Value; + if(!string.IsNullOrEmpty(password)) + Password = password; + + var servers = uriMatch.Groups[3].Value; + if(!string.IsNullOrEmpty(servers)) + ParseServers(servers); + + var database = uriMatch.Groups[4].Value; + if(!string.IsNullOrEmpty(database)) + Database = database; + + var values = uriMatch.Groups[5].Value; + if(!string.IsNullOrEmpty(values)) + foreach(var pair in values.Split('&')) + ParseValuePair(pair); + } + + /// + /// Parses the specified connection string. + /// + /// The connection string. + private void Parse(string connectionString) + { + if(connectionString == null) + throw new ArgumentNullException("connectionString"); + + var segments = connectionString.Split(';'); + + foreach(var segment in segments) + ParseValuePair(segment); + } + + /// + /// Parses the value pair. + /// + /// The pair. + private void ParseValuePair(string pair) + { + var pairMatch = PairRegex.Match(pair); + if(!pairMatch.Success) + throw new FormatException(string.Format("Invalid connection string on: {0}", pairMatch.Value)); + + var key = pairMatch.Groups[1].Value; + var value = pairMatch.Groups[2].Value; + + ParseValuePair(key, value); + } + + /// + /// Parses the specified key. + /// + /// The key. + /// The value. + private void ParseValuePair(string key, string value) + { + switch(key.ToLower()) + { + case "username": + case "user id": + case "user": + { + Username = value; + break; + } + case "password": + { + Password = value; + break; + } + case "pooled": + { + try + { + Pooled = bool.Parse(value); + } + catch(FormatException exception) + { + throw new FormatException("Invalid string for Pooled in connection string", exception); + } + break; + } + case "database": + case "data source": + { + Database = value; + break; + } + case "maximumpoolsize": + case "max pool size": + { + try + { + MaximumPoolSize = int.Parse(value); + } + catch(FormatException exception) + { + throw new FormatException("Invalid number for MaximumPoolSize in connection string", exception); + } + break; + } + case "minimumpoolsize": + case "min pool size": + { + try + { + MinimumPoolSize = int.Parse(value); + } + catch(FormatException exception) + { + throw new FormatException("Invalid number for MinimumPoolSize in connection string", exception); + } + break; + } + case "connectionlifetime": + case "connection lifetime": + { + try + { + var seconds = double.Parse(value); + + ConnectionLifetime = seconds > 0 ? TimeSpan.FromSeconds(seconds) : DefaultConnectionLifeTime; + } + catch(FormatException exception) + { + throw new FormatException("Invalid number for ConnectionLifetime in connection string", exception); + } + break; + } + case "connectiontimeout": + case "connecttimeout": + { + try + { + var seconds = double.Parse(value); + + ConnectionTimeout = seconds > 0 ? 
TimeSpan.FromSeconds(seconds) : DefaultConnectionTimeout; + } + catch(FormatException exception) + { + throw new FormatException("Invalid number for ConnectionTimeout in connection string", exception); + } + break; + } + case "server": + case "servers": + { + ParseServers(value); + + break; + } + default: + throw new FormatException(string.Format("Unknown connection string option: {0}", key)); + } + } + + /// + /// Parses the servers. + /// + /// The value. + private void ParseServers(string value) + { + var servers = value.Split(','); + + foreach(var serverMatch in servers.Select(server => ServerRegex.Match(server))) + { + if(!serverMatch.Success) + throw new FormatException(string.Format("Invalid server in connection string: {0}", serverMatch.Value)); + + var serverHost = serverMatch.Groups[1].Value; + + int port; + if(int.TryParse(serverMatch.Groups[2].Value, out port)) + AddServer(serverHost, port); + else + AddServer(serverHost); + } + } + + /// + /// Adds the server. + /// + /// The end point. + public void AddServer(MongoServerEndPoint endPoint) + { + if(endPoint == null) + throw new ArgumentNullException("endPoint"); + + _servers.Add(endPoint); + } + + /// + /// Clears the servers. + /// + public void ClearServers() + { + _servers.Clear(); + } + + /// + /// Adds the server with the given host and default port. + /// + /// The host. + public void AddServer(string host) + { + AddServer(new MongoServerEndPoint(host)); + } + + /// + /// Adds the server with the given host and port. + /// + /// The host. + /// The port. + public void AddServer(string host, int port) + { + AddServer(new MongoServerEndPoint(host, port)); + } + + /// + /// Returns a + /// + /// that represents this instance. + /// + /// A + /// + /// that represents this instance. + public override string ToString() + { + var builder = new StringBuilder(); + + if(!string.IsNullOrEmpty(Username)) + builder.AppendFormat("Username={0};", Username); + + if(!string.IsNullOrEmpty(Password)) + builder.AppendFormat("Password={0};", Password); + + if(_servers.Count > 0) + { + builder.Append("Server="); + + foreach(var server in _servers) + { + builder.Append(server.Host); + + if(server.Port != MongoServerEndPoint.DefaultPort) + builder.AppendFormat(":{0}", server.Port); + + builder.Append(','); + } + + // remove last , + builder.Remove(builder.Length - 1, 1); + + builder.Append(';'); + } + + if(Pooled != true) + builder.AppendFormat("Pooled={0};", Pooled); + + if(MaximumPoolSize != DefaultMaximumPoolSize) + builder.AppendFormat("MaximumPoolSize={0};", MaximumPoolSize); + + if(MinimumPoolSize != DefaultMinimumPoolSize) + builder.AppendFormat("MinimumPoolSize={0};", MinimumPoolSize); + + if(ConnectionTimeout != DefaultConnectionTimeout) + builder.AppendFormat("ConnectionTimeout={0};", ConnectionTimeout.TotalSeconds); + + if(ConnectionLifetime != DefaultConnectionLifeTime) + builder.AppendFormat("ConnectionLifetime={0};", ConnectionLifetime.TotalSeconds); + + // remove last ; + if(builder.Length > 0) + builder.Remove(builder.Length - 1, 1); + + return builder.ToString(); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/MongoDB.csproj b/source/MongoDB/MongoDB.csproj new file mode 100644 index 00000000..78f966c1 --- /dev/null +++ b/source/MongoDB/MongoDB.csproj @@ -0,0 +1,385 @@ + + + + Debug + AnyCPU + 9.0.30729 + 2.0 + {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} + Library + MongoDB + C:\Documents and Settings\scorder\Application Data\ICSharpCode/SharpDevelop3.0\Settings.SourceAnalysis + True + False + False + false + + + 
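The MongoConnectionStringBuilder parser above accepts both mongodb:// URIs and semicolon-separated key=value pairs. A small sketch, with made-up hosts and credentials:

    var fromUri = new MongoConnectionStringBuilder(
        "mongodb://reader:secret@db1.example.net:27017,db2.example.net/blog?pooled=true");
    // fromUri.Username == "reader", fromUri.Database == "blog", fromUri.Servers.Length == 2

    var byHand = new MongoConnectionStringBuilder("Server=localhost:27018;MaximumPoolSize=50");
    var roundTripped = byHand.ToString();   // "Server=localhost:27018;MaximumPoolSize=50"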
3.5 + + + false + v3.5 + MongoDB.Driver + v3.5 + MongoDB + true + ..\..\StrongName.snk + publish\ + true + Disk + false + Foreground + 7 + Days + false + false + true + 0 + 1.0.0.%2a + false + true + + + true + full + false + bin\Debug + DEBUG;CODE_ANALYSIS;TRACE + prompt + 4 + false + bin\Debug\MongoDB.xml + Off + AllRules.ruleset + false + 0618 + + + none + false + ..\Release + prompt + 4 + false + true + bin\Release\MongoDB.xml + AllRules.ruleset + false + + + + + none + false + bin\Release\ + prompt + 4 + false + AllRules.ruleset + true + + + False + + + False + Auto + 4194304 + AnyCPU + 4096 + + + + + 3.5 + + + + + + + + AssemblyInfoGlobal.cs + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + False + .NET Framework 3.5 SP1 Client Profile + false + + + False + + + false + + + False + + + false + + + False + + + false + + + False + .NET Framework 3.5 SP1 + false + + + + + StrongName.snk + + + + + \ No newline at end of file diff --git a/source/MongoDB/MongoDatabase.cs b/source/MongoDB/MongoDatabase.cs new file mode 100644 index 00000000..bf82347b --- /dev/null +++ b/source/MongoDB/MongoDatabase.cs @@ -0,0 +1,314 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Configuration; +using MongoDB.Connections; +using MongoDB.Results; + +namespace MongoDB +{ + /// + /// + public class MongoDatabase : IMongoDatabase + { + private readonly MongoConfiguration _configuration; + private readonly Connection _connection; + private DatabaseJavascript _javascript; + private DatabaseMetadata _metadata; + + /// + /// Initializes a new instance of the class. + /// + /// The connection string. + public MongoDatabase(string connectionString) + : this(new MongoConfiguration {ConnectionString = connectionString}) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The configuration. + public MongoDatabase(MongoConfiguration configuration) + : this(configuration, + ConnectionFactoryFactory.GetConnection(configuration.ConnectionString), + new MongoConnectionStringBuilder(configuration.ConnectionString).Database) + { + //Todo: Add check for null + } + + /// + /// Initializes a new instance of the class. + /// + /// The configuration. + /// The conn. + /// The name. + internal MongoDatabase(MongoConfiguration configuration, Connection connection, string name) + { + if(configuration == null) + throw new ArgumentNullException("configuration"); + if(connection == null) + throw new ArgumentNullException("connection"); + if(name == null) + throw new ArgumentNullException("name"); + + Name = name; + _configuration = configuration; + _connection = connection; + } + + /// + /// Gets or sets the name. + /// + /// The name. + public string Name { get; private set; } + + /// + /// Gets the meta data. + /// + /// The meta data. + public DatabaseMetadata Metadata + { + get { return _metadata ?? (_metadata = new DatabaseMetadata(_configuration, Name, _connection)); } + } + + /// + /// Gets the javascript. + /// + /// The javascript. + public DatabaseJavascript Javascript + { + get { return _javascript ?? 
(_javascript = new DatabaseJavascript(this)); } + } + + /// + /// Gets the with the specified name. + /// + /// + public IMongoCollection this[String name] + { + get { return GetCollection(name); } + } + + /// + /// Gets the collection names. + /// + /// + public List GetCollectionNames() + { + var namespaces = this["system.namespaces"]; + var cursor = namespaces.Find(new Document()); + //Todo: Should filter built-ins + return cursor.Documents.Select(d => (String)d["name"]).ToList(); + } + + /// + /// Gets the collection. + /// + /// The name. + /// + public IMongoCollection GetCollection(string name) + { + return new MongoCollection(_configuration, _connection, Name, name); + } + + /// + /// Gets the collection. + /// + /// + /// The name. + /// + public IMongoCollection GetCollection(String name) where T : class + { + return new MongoCollection(_configuration, _connection, Name, name); + } + + /// + /// Gets the collection. + /// + /// + /// + public IMongoCollection GetCollection() where T : class + { + var collectionName = _configuration.SerializationFactory.GetCollectionName(typeof(T)); + return GetCollection(collectionName); + } + + /// + /// Gets the document that a reference is pointing to. + /// + /// The reference. + /// + public Document FollowReference(DBRef reference) + { + if(reference == null) + throw new ArgumentNullException("reference", "cannot be null"); + var query = new Document().Add("_id", reference.Id); + return this[reference.CollectionName].FindOne(query); + } + + /// + /// Follows the reference. + /// + /// + /// The reference. + /// + public T FollowReference(DBRef reference) where T : class + { + if(reference == null) + throw new ArgumentNullException("reference", "cannot be null"); + var query = new Document().Add("_id", reference.Id); + return GetCollection(reference.CollectionName).FindOne(query); + } + + /// + /// Most operations do not have a return code in order to save the client from having to wait for results. + /// GetLastError can be called to retrieve the return code if clients want one. + /// + /// + public Document GetLastError() + { + return SendCommand("getlasterror"); + } + + /// + /// Retrieves the last error and forces the database to fsync all files before returning. + /// + /// if set to true [fsync]. + /// + /// + /// Server version 1.3+ + /// + public Document GetLastError(bool fsync) + { + return SendCommand(new Document {{"getlasterror", 1.0}, {"fsync", fsync}}); + } + + /// + /// Call after sending a bulk operation to the database. + /// + /// + public Document GetPreviousError() + { + return SendCommand("getpreverror"); + } + + /// + /// Gets the sister database on the same Mongo connection with the given name. + /// + /// Name of the sister database. + /// + public MongoDatabase GetSisterDatabase(string sisterDatabaseName) + { + return new MongoDatabase(_configuration, _connection, sisterDatabaseName); + } + + /// + /// Resets last error. This is good to call before a bulk operation. + /// + public void ResetError() + { + SendCommand("reseterror"); + } + + /// + /// Evals the specified javascript. + /// + /// The javascript. + /// + public Document Eval(string javascript) + { + return Eval(javascript, new Document()); + } + + /// + /// Evals the specified javascript. + /// + /// The javascript. + /// The scope. + /// + public Document Eval(string javascript, Document scope) + { + return Eval(new CodeWScope(javascript, scope)); + } + + /// + /// Evals the specified code scope. + /// + /// The code scope. 
+ /// + public Document Eval(CodeWScope codeScope) + { + var cmd = new Document().Add("$eval", codeScope); + return SendCommand(cmd); + } + + /// + /// Sends the command. + /// + /// The command name. + /// + public Document SendCommand(string commandName) + { + return SendCommand(new Document().Add(commandName, 1.0)); + } + + /// + /// Sends the command. + /// + /// The CMD. + /// + public Document SendCommand(Document command) + { + return SendCommand(typeof(Document), command); + } + + /// + /// Sends the command. + /// + /// Type of serialization root. + /// The CMD. + /// + public Document SendCommand(Type rootType, Document command) + { + return _connection.SendCommand(_configuration.SerializationFactory, Name, rootType, command); + } + + /// + /// Sends the command. + /// + /// + /// Name of the command. + /// + public T SendCommand(string commandName) + where T : CommandResultBase + { + return SendCommand(new Document().Add(commandName, 1.0)); + } + + /// + /// Sends the command. + /// + /// + /// The command. + /// + public T SendCommand(object command) + where T : CommandResultBase + { + return _connection.SendCommand(_configuration.SerializationFactory, Name, typeof(T), command); + } + + /// + /// Sends the command. + /// + /// + /// Type of serialization root. + /// The command. + /// + public T SendCommand(Type rootType, object command) + where T : CommandResultBase + { + return _connection.SendCommand(_configuration.SerializationFactory, Name, rootType, command); + } + } +} \ No newline at end of file diff --git a/MongoDBDriver/MongoMaxKey.cs b/source/MongoDB/MongoMaxKey.cs similarity index 95% rename from MongoDBDriver/MongoMaxKey.cs rename to source/MongoDB/MongoMaxKey.cs index 551d62eb..fe172f21 100644 --- a/MongoDBDriver/MongoMaxKey.cs +++ b/source/MongoDB/MongoMaxKey.cs @@ -1,4 +1,4 @@ -namespace MongoDB.Driver +namespace MongoDB { /// /// Class representing the MaxKey Bson type. It will always compare higher than any other type. diff --git a/MongoDBDriver/MongoMinKey.cs b/source/MongoDB/MongoMinKey.cs similarity index 95% rename from MongoDBDriver/MongoMinKey.cs rename to source/MongoDB/MongoMinKey.cs index 1db7b7f6..e41f0ef9 100644 --- a/MongoDBDriver/MongoMinKey.cs +++ b/source/MongoDB/MongoMinKey.cs @@ -1,4 +1,4 @@ -namespace MongoDB.Driver +namespace MongoDB { /// /// Class representing the MinKey Bson type. It will always compare lower than any other type. diff --git a/source/MongoDB/MongoRegex.cs b/source/MongoDB/MongoRegex.cs new file mode 100644 index 00000000..11bc7b67 --- /dev/null +++ b/source/MongoDB/MongoRegex.cs @@ -0,0 +1,288 @@ +using System; +using System.Text.RegularExpressions; +using System.Xml; +using System.Xml.Schema; +using System.Xml.Serialization; + +namespace MongoDB +{ + /// + /// + [Serializable] + public sealed class MongoRegex : IEquatable, IXmlSerializable + { + /// + /// Initializes a new instance of the class. + /// + public MongoRegex() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The expression. + public MongoRegex(string expression) + : this(expression, string.Empty) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The expression. + /// The options. + public MongoRegex(string expression, MongoRegexOption options) + { + Expression = expression; + Options = options; + } + + /// + /// Initializes a new instance of the class. + /// + /// The Regex expression. + /// The Regex options. 
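A brief sketch of the database-level API in MongoDatabase.cs above; the connection string and scope values are illustrative:

    var database = new MongoDatabase("Server=localhost;Database=blog");

    var collectionNames = database.GetCollectionNames();

    var lastError = database.GetLastError();   // "err" is null when the previous operation succeeded

    // Runs server-side $eval with a CodeWScope; the server reports the value under "retval" in the result.
    var evalResult = database.Eval("function() { return x * 2; }", new Document("x", 21));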
+ public MongoRegex(string expression, RegexOptions options) + : this(new Regex(expression, options)) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The regex. + public MongoRegex(Regex regex) + { + if(regex == null) + throw new ArgumentNullException("regex"); + + Expression = regex.ToString(); + + ToggleOption("i", (regex.Options & RegexOptions.IgnoreCase) != 0); + ToggleOption("m", (regex.Options & RegexOptions.Multiline) != 0); + ToggleOption("g", (regex.Options & RegexOptions.IgnorePatternWhitespace) != 0); + } + + /// + /// Initializes a new instance of the class. + /// + /// The expression. + /// The options. + public MongoRegex(string expression, string options) + { + Expression = expression; + RawOptions = options; + } + + /// + /// A valid regex string including the enclosing / characters. + /// + public string Expression { get; set; } + + /// + /// Gets or sets the options. + /// + /// The options. + public MongoRegexOption Options + { + get + { + var options = MongoRegexOption.None; + + if(RawOptions != null) + { + if(RawOptions.Contains("i")) + options = options | MongoRegexOption.IgnoreCase; + if(RawOptions.Contains("m")) + options = options | MongoRegexOption.Multiline; + if(RawOptions.Contains("g")) + options = options | MongoRegexOption.IgnorePatternWhitespace; + } + + return options; + } + set + { + ToggleOption("i", (value & MongoRegexOption.IgnoreCase) != 0); + ToggleOption("m", (value & MongoRegexOption.Multiline) != 0); + ToggleOption("g", (value & MongoRegexOption.IgnorePatternWhitespace) != 0); + } + } + + /// + /// A string that may contain only the characters 'g', 'i', and 'm'. + /// Because the JS and TenGen representations support a limited range of options, + /// any nonconforming options will be dropped when converting to this representation + /// + public string RawOptions { get; set; } + + /// + /// Builds a .Net Regex. + /// + /// + public Regex BuildRegex() + { + var options = RegexOptions.None; + + if(RawOptions != null) + { + if(RawOptions.Contains("i")) + options = options | RegexOptions.IgnoreCase; + if(RawOptions.Contains("m")) + options = options | RegexOptions.Multiline; + if(RawOptions.Contains("g")) + options = options | RegexOptions.IgnorePatternWhitespace; + } + + return new Regex(Expression,options); + } + + /// + /// Indicates whether the current object is equal to another object of the same type. + /// + /// An object to compare with this object. + /// + /// true if the current object is equal to the parameter; otherwise, false. + /// + public bool Equals(MongoRegex other) + { + if(ReferenceEquals(null, other)) + return false; + if(ReferenceEquals(this, other)) + return true; + return Equals(other.Expression, Expression) && Equals(other.RawOptions, RawOptions); + } + + /// + /// This method is reserved and should not be used. When implementing the IXmlSerializable interface, you should return null (Nothing in Visual Basic) from this method, and instead, if specifying a custom schema is required, apply the to the class. + /// + /// + /// An that describes the XML representation of the object that is produced by the method and consumed by the method. + /// + XmlSchema IXmlSerializable.GetSchema() + { + return null; + } + + /// + /// Generates an object from its XML representation. + /// + /// The stream from which the object is deserialized. 
+ void IXmlSerializable.ReadXml(XmlReader reader) + { + if(reader.MoveToAttribute("options")) + RawOptions = reader.Value; + + if(reader.IsEmptyElement) + return; + + Expression = reader.ReadString(); + } + + /// + /// Converts an object into its XML representation. + /// + /// The stream to which the object is serialized. + void IXmlSerializable.WriteXml(XmlWriter writer) + { + if(RawOptions != null) + writer.WriteAttributeString("options", RawOptions); + + if(Expression == null) + return; + + writer.WriteString(Expression); + } + + /// + /// Toggles the option. + /// + /// The option. + /// if set to true [enabled]. + private void ToggleOption(string option, bool enabled) + { + if(RawOptions == null) + RawOptions = string.Empty; + + if(enabled) + { + if(RawOptions.Contains(option)) + return; + RawOptions += option; + } + else + { + if(!RawOptions.Contains(option)) + return; + RawOptions = RawOptions.Replace(option, string.Empty); + } + } + + /// + /// Determines whether the specified is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + /// + /// The parameter is null. + /// + public override bool Equals(object obj) + { + if(ReferenceEquals(null, obj)) + return false; + if(ReferenceEquals(this, obj)) + return true; + return obj.GetType() == typeof(MongoRegex) && Equals((MongoRegex)obj); + } + + /// + /// Implements the operator ==. + /// + /// The left. + /// The right. + /// The result of the operator. + public static bool operator ==(MongoRegex left, MongoRegex right) + { + return Equals(left, right); + } + + /// + /// Implements the operator !=. + /// + /// The left. + /// The right. + /// The result of the operator. + public static bool operator !=(MongoRegex left, MongoRegex right) + { + return !Equals(left, right); + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode() + { + unchecked + { + return ((Expression != null ? Expression.GetHashCode() : 0)*397) ^ (RawOptions != null ? RawOptions.GetHashCode() : 0); + } + } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString() + { + return string.Format("{0}{1}", Expression, RawOptions); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/MongoRegexOption.cs b/source/MongoDB/MongoRegexOption.cs new file mode 100644 index 00000000..0dff132c --- /dev/null +++ b/source/MongoDB/MongoRegexOption.cs @@ -0,0 +1,30 @@ +using System; + +namespace MongoDB +{ + /// + /// Mongo Regex options + /// + [Flags] + public enum MongoRegexOption + { + /// + /// Specifies that no options are set. + /// + None = 0, + /// + /// i - Specifies case-insensitive matching. + /// + IgnoreCase = 1, + /// + /// m - Multiline mode. Changes the meaning of ^ and $ so they match at the beginning + /// and end, respectively, of any line, and not just the beginning and end of the + /// entire string. + /// + Multiline = 2, + /// + /// g - Eliminates unescaped white space from the pattern. 
+ /// + IgnorePatternWhitespace = 4 + } +} \ No newline at end of file diff --git a/source/MongoDB/MongoServerEndPoint.cs b/source/MongoDB/MongoServerEndPoint.cs new file mode 100644 index 00000000..47a9ecd5 --- /dev/null +++ b/source/MongoDB/MongoServerEndPoint.cs @@ -0,0 +1,199 @@ +using System; +using System.Globalization; +using System.Net; +using System.Xml; +using System.Xml.Schema; +using System.Xml.Serialization; + +namespace MongoDB +{ + /// + /// Represents a mongodb server with host and port. + /// + [Serializable] + public sealed class MongoServerEndPoint : EndPoint, IEquatable, IXmlSerializable + { + /// + /// The mongo default host name. + /// + public const string DefaultHost = "localhost"; + /// + /// The mongo default server port. + /// + public const int DefaultPort = 27017; + + /// + /// The default MongoServerEndPoint. + /// + public static readonly MongoServerEndPoint Default = new MongoServerEndPoint(); + + /// + /// Initializes a new instance of the class. + /// + public MongoServerEndPoint() + : this(DefaultHost, DefaultPort) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The host. + public MongoServerEndPoint(string host) + : this(host, DefaultPort) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The port. + public MongoServerEndPoint(int port) + : this(DefaultHost, port) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The host. + /// The port. + public MongoServerEndPoint(string host, int port) + { + if(host == null) + throw new ArgumentNullException("host"); + + Host = host; + Port = port; + } + + /// + /// Gets or sets the host. + /// + /// The host. + public string Host { get; private set; } + + /// + /// Gets or sets the port. + /// + /// The port. + public int Port { get; private set; } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString() + { + return string.Format(CultureInfo.CurrentCulture, "{0}:{1}", Host, Port); + } + + /// + /// Indicates whether the current object is equal to another object of the same type. + /// + /// An object to compare with this object. + /// + /// true if the current object is equal to the parameter; otherwise, false. + /// + public bool Equals(MongoServerEndPoint other) + { + if(ReferenceEquals(null, other)) + return false; + if(ReferenceEquals(this, other)) + return true; + return Equals(other.Host, Host) && other.Port == Port; + } + + /// + /// Determines whether the specified is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + /// + /// The parameter is null. + /// + public override bool Equals(object obj) + { + if(ReferenceEquals(null, obj)) + return false; + if(ReferenceEquals(this, obj)) + return true; + return obj.GetType() == typeof(MongoServerEndPoint) && Equals((MongoServerEndPoint)obj); + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode() + { + unchecked + { + return ((Host != null ? Host.GetHashCode() : 0)*397) ^ Port; + } + } + + /// + /// Implements the operator ==. + /// + /// The left. + /// The right. + /// The result of the operator. 
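A sketch of the MongoRegex wrapper and MongoRegexOption flags above. The query usage assumes the serializer emits MongoRegex as the BSON regex type when it appears as a criteria value, which is its evident purpose; the collection and field are made up:

    var startsWithMongo = new MongoRegex("^mongo", MongoRegexOption.IgnoreCase);

    var spec = new Document("name", startsWithMongo);
    var matches = products.Find(spec);               // case-insensitive prefix match on "name"

    Regex netRegex = startsWithMongo.BuildRegex();   // RegexOptions.IgnoreCase reconstructed from the "i" flag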
+ public static bool operator ==(MongoServerEndPoint left, MongoServerEndPoint right) + { + return Equals(left, right); + } + + /// + /// Implements the operator !=. + /// + /// The left. + /// The right. + /// The result of the operator. + public static bool operator !=(MongoServerEndPoint left, MongoServerEndPoint right) + { + return !Equals(left, right); + } + + /// + /// This method is reserved and should not be used. When implementing the IXmlSerializable interface, you should return null (Nothing in Visual Basic) from this method, and instead, if specifying a custom schema is required, apply the to the class. + /// + /// + /// An that describes the XML representation of the object that is produced by the method and consumed by the method. + /// + XmlSchema IXmlSerializable.GetSchema() + { + return null; + } + + /// + /// Generates an object from its XML representation. + /// + /// The stream from which the object is deserialized. + void IXmlSerializable.ReadXml(XmlReader reader) + { + if(reader.MoveToAttribute("host")) + Host = reader.Value; + if(reader.MoveToAttribute("port")) + Port = int.Parse(reader.Value); + } + + /// + /// Converts an object into its XML representation. + /// + /// The stream to which the object is serialized. + void IXmlSerializable.WriteXml(XmlWriter writer) + { + if(Host!=null) + writer.WriteAttributeString("host",Host); + + writer.WriteAttributeString("port",Port.ToString()); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/MongoSymbol.cs b/source/MongoDB/MongoSymbol.cs new file mode 100644 index 00000000..a85230cd --- /dev/null +++ b/source/MongoDB/MongoSymbol.cs @@ -0,0 +1,272 @@ +using System; +using System.Xml; +using System.Xml.Schema; +using System.Xml.Serialization; + +namespace MongoDB +{ + /// + /// Type to hold an interned string that maps to the bson symbol type. + /// + [Serializable] + public struct MongoSymbol : IEquatable, IEquatable, IComparable, IComparable, IXmlSerializable + { + /// + /// Gets or sets the empty. + /// + /// The empty. + public static MongoSymbol Empty { get; private set; } + + /// + /// Initializes the struct. + /// + static MongoSymbol(){ + Empty = new MongoSymbol(); + } + + /// + /// Initializes a new instance of the struct. + /// + /// The value. + public MongoSymbol(string value) + : this(){ + if(!string.IsNullOrEmpty(value)) + Value = String.Intern(value); + } + + /// + /// Gets or sets the value. + /// + /// The value. + public string Value { get; private set; } + + /// + /// Compares the current object with another object of the same type. + /// + /// An object to compare with this object. + /// + /// A 32-bit signed integer that indicates the relative order of the objects being compared. The return value has the following meanings: + /// Value + /// Meaning + /// Less than zero + /// This object is less than the parameter. + /// Zero + /// This object is equal to . + /// Greater than zero + /// This object is greater than . + /// + public int CompareTo(MongoSymbol other){ + return Value.CompareTo(other.Value); + } + + /// + /// Compares the current object with another object of the same type. + /// + /// An object to compare with this object. + /// + /// A 32-bit signed integer that indicates the relative order of the objects being compared. The return value has the following meanings: + /// Value + /// Meaning + /// Less than zero + /// This object is less than the parameter. + /// Zero + /// This object is equal to . + /// Greater than zero + /// This object is greater than . 
+ /// + public int CompareTo(string other){ + return Value.CompareTo(other); + } + + /// + /// Indicates whether the current object is equal to another object of the same type. + /// + /// An object to compare with this object. + /// + /// true if the current object is equal to the parameter; otherwise, false. + /// + public bool Equals(MongoSymbol other){ + return Equals(other.Value, Value); + } + + /// + /// Indicates whether the current object is equal to another object of the same type. + /// + /// An object to compare with this object. + /// + /// true if the current object is equal to the parameter; otherwise, false. + /// + public bool Equals(string other){ + return Value.Equals(other); + } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString(){ + return Value; + } + + /// + /// Determines whether the specified is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + public override bool Equals(object obj){ + if(ReferenceEquals(null, obj)) + return false; + return obj.GetType() == typeof(MongoSymbol) && Equals((MongoSymbol)obj); + } + + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator ==(MongoSymbol a, MongoSymbol b){ + return SymbolEqual(a.Value, b.Value); + } + + /* + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator ==(MongoSymbol a, string b){ + return SymbolEqual(a.Value, b); + }*/ + + /* + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator ==(string a, MongoSymbol b){ + return SymbolEqual(a, b.Value); + }*/ + + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(MongoSymbol a, MongoSymbol b){ + return !(a == b); + } + + /* + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(MongoSymbol a, String b){ + return !(a == b); + }*/ + + /* + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(string a, MongoSymbol b){ + return !(a == b); + }*/ + + /// + /// Performs an implicit conversion from to . + /// + /// The s. + /// The result of the conversion. + public static implicit operator string(MongoSymbol s){ + return s.Value; + } + + /// + /// Performs an implicit conversion from to . + /// + /// The s. + /// The result of the conversion. + public static implicit operator MongoSymbol(string s){ + return new MongoSymbol(s); + } + + /// + /// Determines whether the specified s is empty. + /// + /// The s. + /// + /// true if the specified s is empty; otherwise, false. + /// + public static bool IsEmpty(MongoSymbol s){ + return s == Empty; + } + + /// + /// Symbols the equal. + /// + /// A. + /// The b. + /// + private static bool SymbolEqual(string a, string b){ + return a == b; + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode(){ + return (Value != null ? Value.GetHashCode() : 0); + } + + /// + /// This method is reserved and should not be used. 
When implementing the IXmlSerializable interface, you should return null (Nothing in Visual Basic) from this method, and instead, if specifying a custom schema is required, apply the to the class. + /// + /// + /// An that describes the XML representation of the object that is produced by the method and consumed by the method. + /// + XmlSchema IXmlSerializable.GetSchema() + { + return null; + } + + /// + /// Generates an object from its XML representation. + /// + /// The stream from which the object is deserialized. + void IXmlSerializable.ReadXml(XmlReader reader) + { + if(reader.IsEmptyElement) + return; + + Value = string.Intern(reader.ReadString()); + } + + /// + /// Converts an object into its XML representation. + /// + /// The stream to which the object is serialized. + void IXmlSerializable.WriteXml(XmlWriter writer) + { + if(Value != null) + writer.WriteString(Value); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Obsolete/Cursor.cs b/source/MongoDB/Obsolete/Cursor.cs new file mode 100644 index 00000000..6d360db7 --- /dev/null +++ b/source/MongoDB/Obsolete/Cursor.cs @@ -0,0 +1,196 @@ +using System.Collections.Generic; + +namespace MongoDB +{ + /// + /// + /// + public class Cursor : ICursor + { + private readonly ICursor _cursor; + /// + /// Initializes a new instance of the class. + /// + /// The cursor. + internal Cursor(ICursor cursor) + { + _cursor = cursor; + } + + /// + /// Gets the id. + /// + /// The id. + public long Id + { + get { return _cursor.Id; } + } + + /// + /// Specs the specified spec. + /// + /// The spec. + /// + public ICursor Spec(Document spec){ + _cursor.Spec(spec); + return this; + } + + /// + /// Limits the specified limit. + /// + /// The limit. + /// + public ICursor Limit(int limit){ + _cursor.Limit(limit); + return this; + } + + /// + /// Skips the specified skip. + /// + /// The skip. + /// + public ICursor Skip(int skip){ + _cursor.Skip(skip); + return this; + } + + /// + /// Fieldses the specified fields. + /// + /// The fields. + /// + public ICursor Fields(Document fields){ + _cursor.Fields(fields); + return this; + } + + /// + /// Optionses the specified options. + /// + /// The options. + /// + public ICursor Options(QueryOptions options){ + _cursor.Options(options); + return this; + } + + /// + /// Sorts the specified field. + /// + /// The field. + /// + public ICursor Sort(string field){ + _cursor.Sort(field); + return this; + } + + /// + /// Sorts the specified field. + /// + /// The field. + /// The order. + /// + public ICursor Sort(string field, IndexOrder order){ + _cursor.Sort(field, order); + return this; + } + + /// + /// Sorts the specified fields. + /// + /// The fields. + /// + public ICursor Sort(Document fields){ + _cursor.Sort(fields); + return this; + } + + /// + /// Hints the specified index. + /// + /// The index. + /// + public ICursor Hint(Document index){ + _cursor.Hint(index); + return this; + } + + /// + /// Keeps the cursor open. + /// + /// if set to true [value]. + /// + /// + /// By default cursors are closed automaticly after documents + /// are Enumerated. + /// + public ICursor KeepCursor(bool value) + { + _cursor.KeepCursor(value); + return this; + } + + /// + /// Snapshots this instance. + /// + /// + public ICursor Snapshot(){ + _cursor.Snapshot(); + return this; + } + + /// + /// Explains this instance. + /// + /// + public Document Explain(){ + return _cursor.Explain(); + } + + /// + /// Gets a value indicating whether this instance is modifiable. 
+ /// + /// + /// true if this instance is modifiable; otherwise, false. + /// + public bool IsModifiable { + get { return _cursor.IsModifiable; } + } + + /// + /// Gets the documents. + /// + /// The documents. + public IEnumerable Documents { + get {return _cursor.Documents;} + } + + /// + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + /// + public void Dispose(){ + _cursor.Dispose(); + } + } +} + +// /// +// /// Initializes a new instance of the class. +// /// +// /// The conn. +// /// Full name of the collection. +// public Cursor(Connection connection, string fullCollectionName):base(connection, fullCollectionName){ +// +// } +// +// /// +// /// Initializes a new instance of the class. +// /// +// /// The conn. +// /// Full name of the collection. +// [Obsolete("Use Cursor(Connection, fullCollectionName) and then call the Spec, Limit, Skip and Fields methods")] +// public Cursor(Connection connection, string fullCollectionName, Document spec, int limit, int skip, Document fields) +// :base(connection, fullCollectionName, spec, limit, skip, fields){ +// } diff --git a/source/MongoDB/Obsolete/ICursor.cs b/source/MongoDB/Obsolete/ICursor.cs new file mode 100644 index 00000000..6ffb5abc --- /dev/null +++ b/source/MongoDB/Obsolete/ICursor.cs @@ -0,0 +1,118 @@ +using System; +using System.Collections.Generic; + +namespace MongoDB { + /// + /// + /// + [Obsolete("Uses ICursor instead. This class will be possible not included in future releases.")] + public interface ICursor : IDisposable + { + /// + /// Gets the id. + /// + /// The id. + long Id { get; } + + /// + /// Specs the specified spec. + /// + /// The spec. + /// + ICursor Spec(Document spec); + + /// + /// Limits the specified limit. + /// + /// The limit. + /// + ICursor Limit(int limit); + + /// + /// Skips the specified skip. + /// + /// The skip. + /// + ICursor Skip(int skip); + + /// + /// Fieldses the specified fields. + /// + /// The fields. + /// + ICursor Fields(Document fields); + + /// + /// Optionses the specified options. + /// + /// The options. + /// + ICursor Options(QueryOptions options); + + /// + /// Sorts the specified field. + /// + /// The field. + /// + ICursor Sort(string field); + + /// + /// Sorts the specified field. + /// + /// The field. + /// The order. + /// + ICursor Sort(string field, IndexOrder order); + + /// + /// Sorts the specified fields. + /// + /// The fields. + /// + ICursor Sort(Document fields); + + /// + /// Hints the specified index. + /// + /// The index. + /// + ICursor Hint(Document index); + + /// + /// Keeps the cursor open. + /// + /// if set to true [value]. + /// + /// + /// By default cursors are closed automaticly after documents + /// are Enumerated. + /// + ICursor KeepCursor(bool value); + + /// + /// Snapshots this instance. + /// + /// + ICursor Snapshot(); + + /// + /// Explains this instance. + /// + /// + Document Explain(); + + /// + /// Gets a value indicating whether this instance is modifiable. + /// + /// + /// true if this instance is modifiable; otherwise, false. + /// + bool IsModifiable { get; } + + /// + /// Gets the documents. + /// + /// The documents. 
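The obsolete ICursor below is a fluent builder, so a loose sketch of typical chaining may help; `collection` stands for an IMongoCollection (defined further down) obtained elsewhere, and IndexOrder.Descending is assumed to be one of that enum's members:

using (var cursor = collection.FindAll()
    .Spec(new Document().Add("Status", "A"))
    .Sort("Created", IndexOrder.Descending)
    .Skip(20)
    .Limit(10))
{
    // Each call mutates the same cursor and returns it for chaining;
    // the query runs when Documents is enumerated.
    foreach (Document document in cursor.Documents)
        Console.WriteLine(document["Title"]);
}
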
+ IEnumerable Documents { get; } + } +} diff --git a/source/MongoDB/Obsolete/IMongoCollection.cs b/source/MongoDB/Obsolete/IMongoCollection.cs new file mode 100644 index 00000000..6db47fd2 --- /dev/null +++ b/source/MongoDB/Obsolete/IMongoCollection.cs @@ -0,0 +1,292 @@ +using System; +using System.Collections.Generic; + +namespace MongoDB +{ + /// + /// + [Obsolete("Uses IMongoCollection instead. This class will be possible not included in future releases.")] + public interface IMongoCollection + { + /// + /// Gets the name. + /// + /// The name. + string Name { get; } + + /// + /// Gets the name of the database. + /// + /// The name of the database. + string DatabaseName { get; } + + /// + /// Gets the full name. + /// + /// The full name. + string FullName { get; } + + /// + /// Gets the database. + /// + /// The database. + IMongoDatabase Database { get; } + + /// + /// Gets the meta data. + /// + /// The meta data. + CollectionMetadata Metadata { get; } + + /// + /// Finds the one. + /// + /// The selector. + /// + Document FindOne(Document selector); + + /// + /// Finds all. + /// + /// + ICursor FindAll(); + + /// + /// Finds the specified where. + /// + /// The where. + /// + ICursor Find(String @where); + + /// + /// Finds the specified selector. + /// + /// The selector. + /// + ICursor Find(Document selector); + + /// + /// Finds the specified spec. + /// + /// The spec. + /// The limit. + /// The skip. + /// + ICursor Find(Document spec, int limit, int skip); + + /// + /// Finds the specified spec. + /// + /// The spec. + /// The limit. + /// The skip. + /// The fields. + /// + ICursor Find(Document spec, int limit, int skip, Document fields); + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// The selector. + /// A + Document FindAndModify(Document document, Document selector); + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// The selector. + /// containing the names of columns to sort on with the values being the + /// A + /// + Document FindAndModify(Document document, Document selector, Document sort); + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// The selector. + /// if set to true [return new]. + /// A + Document FindAndModify(Document document, Document selector, bool returnNew); + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// The selector. + /// containing the names of columns to sort on with the values being the + /// + /// if set to true [return new]. + /// A + Document FindAndModify(Document document, Document selector, Document sort, bool returnNew); + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// to find the document. + /// containing the names of columns to sort on with the values being the + /// + /// The fields. + /// if set to true [remove]. + /// if set to true [return new]. + /// if set to true [upsert]. 
+ /// A + Document FindAndModify(Document document, Document spec, Document sort, Document fields, bool remove, bool returnNew, bool upsert); + + /// + /// Maps the reduce. + /// + /// + MapReduce MapReduce(); + + /// + /// Counts this instance. + /// + /// + long Count(); + + /// + /// Counts the specified spec. + /// + /// The selector. + /// + long Count(Document selector); + + /// + /// Inserts the specified doc. + /// + /// The document. + void Insert(Document document); + + /// + /// Inserts the specified doc. + /// + /// The document. + /// if set to true [safemode]. + void Insert(Document document, bool safemode); + + /// + /// Inserts the specified docs. + /// + /// The docs. + void Insert(IEnumerable documents); + + /// + /// Inserts the specified docs. + /// + /// The docs. + /// if set to true [safemode]. + void Insert(IEnumerable documents, bool safemode); + + /// + /// Deletes the specified selector. + /// + /// The selector. + [Obsolete("Use Remove instead")] + void Delete(Document selector); + + /// + /// Removes the specified selector. + /// + /// The selector. + void Remove(Document selector); + + /// + /// Deletes the specified selector. + /// + /// The selector. + /// if set to true [safemode]. + [Obsolete("Use Remove instead")] + void Delete(Document selector, bool safemode); + + /// + /// Removes the specified selector. + /// + /// The selector. + /// if set to true [safemode]. + void Remove(Document selector, bool safemode); + + /// + /// Updates the specified doc. + /// + /// The document. + [Obsolete("Use Save instead")] + void Update(Document document); + + /// + /// Updates the specified doc. + /// + /// The document. + /// The selector. + void Update(Document document, Document selector); + + /// + /// Updates the specified doc. + /// + /// The document. + /// The selector. + /// The flags. + void Update(Document document, Document selector, UpdateFlags flags); + + /// + /// Updates the specified doc. + /// + /// The document. + /// if set to true [safemode]. + [Obsolete("Use Save instead")] + void Update(Document document, bool safemode); + + /// + /// Updates the specified doc. + /// + /// The document. + /// The selector. + /// if set to true [safemode]. + void Update(Document document, Document selector, bool safemode); + + /// + /// Updates the specified doc. + /// + /// The document. + /// The selector. + /// The flags. + /// if set to true [safemode]. + void Update(Document document, Document selector, UpdateFlags flags, bool safemode); + + /// + /// Updates all. + /// + /// The document. + /// The selector. + void UpdateAll(Document document, Document selector); + + /// + /// Updates all. + /// + /// The document. + /// The selector. + /// if set to true [safemode]. + void UpdateAll(Document document, Document selector, bool safemode); + + /// + /// Saves the specified doc. + /// + /// The document. + void Save(Document document); + + /// + /// Saves the specified doc. + /// + /// The document. + /// if set to true [safemode]. 
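Taken together, the members above form the old-style CRUD surface. A hedged usage sketch (the `users` collection and its helper are placeholders, and Document.Add is assumed to be chainable):

IMongoCollection users = GetUsersCollection();   // hypothetical helper, not part of this change

users.Insert(new Document().Add("Name", "Ada").Add("Logins", 0), true);   // safemode insert

Document selector = new Document().Add("Name", "Ada");
Document found = users.FindOne(selector);
long count = users.Count(selector);

// Atomically increment a counter and return the updated document (returnNew = true).
Document updated = users.FindAndModify(
    new Document().Add("$inc", new Document().Add("Logins", 1)),
    selector,
    true);

users.Remove(selector, true);
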
+ void Save(Document document, bool safemode); + } +} \ No newline at end of file diff --git a/source/MongoDB/Obsolete/MongoCollection.cs b/source/MongoDB/Obsolete/MongoCollection.cs new file mode 100644 index 00000000..c9347f07 --- /dev/null +++ b/source/MongoDB/Obsolete/MongoCollection.cs @@ -0,0 +1,417 @@ +using System; +using System.Collections.Generic; +using MongoDB.Configuration; +using MongoDB.Connections; + +namespace MongoDB +{ + /// + /// + public class MongoCollection : IMongoCollection + { + private readonly MongoCollection _collection; + + /// + /// Initializes a new instance of the class. + /// + /// The configuration. + /// The connection. + /// Name of the database. + /// The name. + internal MongoCollection(MongoConfiguration configuration, Connection connection, string databaseName, string name) + { + //Todo: add public constrcutor for users to call + _collection = new MongoCollection(configuration, connection, databaseName, name); + } + + /// + /// Gets the database. + /// + /// The database. + public IMongoDatabase Database + { + get { return _collection.Database; } + } + + /// + /// Gets the name. + /// + /// The name. + public string Name + { + get { return _collection.Name; } + } + + /// + /// Gets the name of the database. + /// + /// The name of the database. + public string DatabaseName + { + get { return _collection.DatabaseName; } + } + + /// + /// Gets the full name. + /// + /// The full name. + public string FullName + { + get { return _collection.FullName; } + } + + /// + /// Gets the meta data. + /// + /// The meta data. + public CollectionMetadata Metadata + { + get { return _collection.Metadata; } + } + + /// + /// Finds the one. + /// + /// The spec. + /// + public Document FindOne(Document spec) + { + return _collection.FindOne(spec); + } + + /// + /// Finds all. + /// + /// + public ICursor FindAll() + { + return new Cursor(_collection.FindAll()); + } + + /// + /// Finds the specified where. + /// + /// The where. + /// + public ICursor Find(string @where) + { + return new Cursor(_collection.Find(@where)); + } + + /// + /// Finds the specified spec. + /// + /// The spec. + /// + public ICursor Find(Document spec) + { + return new Cursor(_collection.Find(spec)); + } + + /// + /// Finds the specified spec. + /// + /// The spec. + /// The limit. + /// The skip. + /// + public ICursor Find(Document spec, int limit, int skip) + { + return new Cursor(_collection.Find(spec, limit, skip)); + } + + /// + /// Finds the specified spec. + /// + /// The spec. + /// The limit. + /// The skip. + /// The fields. + /// + public ICursor Find(Document spec, int limit, int skip, Document fields) + { + return new Cursor(_collection.Find(spec, limit, skip, fields)); + } + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// to find the document. + /// A + public Document FindAndModify(Document document, Document spec) + { + return _collection.FindAndModify(document, spec); + } + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// to find the document. 
+ /// containing the names of columns to sort on with the values being the + /// A + /// + public Document FindAndModify(Document document, Document spec, Document sort) + { + return _collection.FindAndModify(document, spec, sort); + } + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// to find the document. + /// if set to true [return new]. + /// A + public Document FindAndModify(Document document, Document spec, bool returnNew) + { + return _collection.FindAndModify(document, spec, returnNew); + } + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// to find the document. + /// containing the names of columns to sort on with the values being the + /// + /// if set to true [return new]. + /// A + public Document FindAndModify(Document document, Document spec, Document sort, bool returnNew) + { + return _collection.FindAndModify(document, spec, sort, null, false, returnNew, false); + } + + /// + /// Executes a query and atomically applies a modifier operation to the first document returning the original document + /// by default. + /// + /// The document. + /// to find the document. + /// containing the names of columns to sort on with the values being the + /// + /// The fields. + /// if set to true [remove]. + /// if set to true [return new]. + /// if set to true [upsert]. + /// A + public Document FindAndModify(Document document, Document spec, Document sort, Document fields, bool remove, bool returnNew, bool upsert) + { + return _collection.FindAndModify(document, spec, sort, fields, remove, returnNew, upsert); + } + + /// + /// Maps the reduce. + /// + /// + public MapReduce MapReduce() + { + return _collection.MapReduce(); + } + + /// + /// Counts this instance. + /// + /// + public long Count() + { + return _collection.Count(); + } + + /// + /// Counts the specified spec. + /// + /// The spec. + /// + public long Count(Document spec) + { + return _collection.Count(spec); + } + + /// + /// Inserts the specified doc. + /// + /// The document. + public void Insert(Document document) + { + _collection.Insert(document); + } + + /// + /// Inserts the specified doc. + /// + /// The document. + /// if set to true [safemode]. + public void Insert(Document document, bool safemode) + { + _collection.Insert(document, safemode); + } + + /// + /// Inserts the specified docs. + /// + /// The docs. + public void Insert(IEnumerable documents) + { + _collection.Insert(documents); + } + + /// + /// Inserts the specified docs. + /// + /// The docs. + /// if set to true [safemode]. + public void Insert(IEnumerable documents, bool safemode) + { + _collection.Insert(documents, safemode); + } + + /// + /// Deletes the specified selector. + /// + /// The selector. + [Obsolete("Use Remove instead")] + public void Delete(Document selector) + { + _collection.Delete(selector); + } + + /// + /// Removes the specified selector. + /// + /// The selector. + public void Remove(Document selector) + { + _collection.Remove(selector); + } + + /// + /// Deletes the specified selector. + /// + /// The selector. + /// if set to true [safemode]. + [Obsolete("Use Remove instead")] + public void Delete(Document selector, bool safemode) + { + _collection.Delete(selector, safemode); + } + + /// + /// Removes the specified selector. + /// + /// The selector. 
+ /// if set to true [safemode]. + public void Remove(Document selector, bool safemode) + { + _collection.Remove(selector, safemode); + } + + /// + /// Updates the specified doc. + /// + /// The document. + /// if set to true [safemode]. + public void Update(Document document, bool safemode) + { + _collection.Save(document, safemode); + } + + /// + /// Updates the specified doc. + /// + /// The document. + public void Update(Document document) + { + _collection.Save(document); + } + + /// + /// Updates the specified doc. + /// + /// The document. + /// The selector. + public void Update(Document document, Document selector) + { + _collection.Update(document, selector); + } + + /// + /// Updates the specified doc. + /// + /// The document. + /// The selector. + /// if set to true [safemode]. + public void Update(Document document, Document selector, bool safemode) + { + _collection.Update(document, selector, safemode); + } + + /// + /// Updates the specified doc. + /// + /// The document. + /// The selector. + /// The flags. + public void Update(Document document, Document selector, UpdateFlags flags) + { + _collection.Update(document, selector, flags); + } + + /// + /// Updates the specified doc. + /// + /// The document. + /// The selector. + /// The flags. + /// if set to true [safemode]. + public void Update(Document document, Document selector, UpdateFlags flags, bool safemode) + { + _collection.Update(document, selector, flags, safemode); + } + + /// + /// Updates all. + /// + /// The document. + /// The selector. + public void UpdateAll(Document document, Document selector) + { + _collection.UpdateAll(document, selector); + } + + /// + /// Updates all. + /// + /// The document. + /// The selector. + /// if set to true [safemode]. + public void UpdateAll(Document document, Document selector, bool safemode) + { + _collection.UpdateAll(document, selector, safemode); + } + + /// + /// Saves the specified doc. + /// + /// The document. + public void Save(Document document) + { + _collection.Save(document); + } + + /// + /// Saves the specified doc. + /// + /// The document. + /// if set to true [safemode]. + public void Save(Document document, bool safemode) + { + _collection.Save(document, safemode); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Oid.cs b/source/MongoDB/Oid.cs new file mode 100644 index 00000000..0a9fe6ad --- /dev/null +++ b/source/MongoDB/Oid.cs @@ -0,0 +1,344 @@ +using System; +using System.Text.RegularExpressions; +using System.Xml; +using System.Xml.Schema; +using System.Xml.Serialization; +using MongoDB.Bson; +using MongoDB.Util; + +namespace MongoDB +{ + /// + /// Oid is an immutable object that represents a Mongo ObjectId. + /// + [Serializable] + public sealed class Oid : IEquatable, IComparable, IFormattable, IXmlSerializable + { + private static readonly OidGenerator OidGenerator = new OidGenerator(); + private byte[] _bytes; + + /// + /// Initializes a new instance of the class. + /// + /// + /// Needed for some serializers. + /// + private Oid() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The value. + public Oid(string value) + { + if(value == null) + throw new ArgumentNullException("value"); + + ParseBytes(value); + } + + /// + /// Initializes a new instance of the class. + /// + /// The value. + public Oid(byte[] value) + { + if(value == null) + throw new ArgumentNullException("value"); + + _bytes = new byte[12]; + Array.Copy(value, _bytes, 12); + } + + /// + /// Initializes a new instance of the class. 
+ /// + /// The oid. + public Oid(Oid oid) + { + if(oid == null) + throw new ArgumentNullException("oid"); + + _bytes = oid._bytes; + } + + /// + /// Gets the created. + /// + /// The created. + public DateTime Created + { + get + { + var time = new byte[4]; + Array.Copy(_bytes, time, 4); + Array.Reverse(time); + var seconds = BitConverter.ToInt32(time, 0); + return BsonInfo.Epoch.AddSeconds(seconds); + } + } + + /// + /// Compares the current object with another object of the same type. + /// + /// An object to compare with this object. + /// + /// A 32-bit signed integer that indicates the relative order of the objects being compared. The return value has the following meanings: + /// Value + /// Meaning + /// Less than zero + /// This object is less than the parameter. + /// Zero + /// This object is equal to . + /// Greater than zero + /// This object is greater than . + /// + public int CompareTo(Oid other) + { + if(ReferenceEquals(other, null)) + return 1; + var otherBytes = other.ToByteArray(); + for(var x = 0; x < _bytes.Length; x++) + if(_bytes[x] < otherBytes[x]) + return -1; + else if(_bytes[x] > otherBytes[x]) + return 1; + return 0; + } + + /// + /// Indicates whether the current object is equal to another object of the same type. + /// + /// An object to compare with this object. + /// + /// true if the current object is equal to the parameter; otherwise, false. + /// + public bool Equals(Oid other) + { + return CompareTo(other) == 0; + } + + /// + /// Determines whether the specified is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + public override bool Equals(object obj) + { + if(obj is Oid) + return CompareTo((Oid)obj) == 0; + return false; + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode() + { + return ToString().GetHashCode(); + } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString() + { + return BitConverter.ToString(_bytes).Replace("-", "").ToLower(); + } + + /// + /// Returns a that represents this instance. + /// + /// The format. + /// + /// A that represents this instance. + /// + /// + /// J = Returns Javascript string + /// + public string ToString(string format) + { + if(string.IsNullOrEmpty(format)) + return ToString(); + + if(format == "J") + return String.Format("\"{0}\"", ToString()); + + throw new ArgumentException("Invalid format string","format"); + } + + /// + /// Returns a that represents this instance. + /// + /// The format. + /// The format provider. + /// + /// A that represents this instance. + /// + /// + /// J = Returns Javascript string + /// + public string ToString(string format, IFormatProvider formatProvider) + { + return ToString(format); + } + + /// + /// Converts the Oid to a byte array. + /// + public byte[] ToByteArray() + { + var ret = new byte[12]; + Array.Copy(_bytes, ret, 12); + return ret; + } + + /// + /// Generates an Oid using OidGenerator. + /// + /// + /// A + /// + public static Oid NewOid() + { + return OidGenerator.Generate(); + } + + /// + /// Implements the operator ==. + /// + /// A. + /// The b. + /// The result of the operator. 
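Since Oid wraps the 12-byte ObjectId and stores the creation time in its first four bytes, a brief sketch of the members above (the hex literal is an arbitrary example value):

var id = new Oid("4b7cbec97ea29119c41d1e25");   // 24 hex characters; surrounding quotes are stripped
DateTime created = id.Created;                  // decoded from the first four big-endian bytes
string hex = id.ToString();                     // lower-case hex string

var newer = Oid.NewOid();                       // client-side generated id
bool older = id.CompareTo(newer) < 0;           // byte-wise ordering
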
+ public static bool operator ==(Oid a, Oid b){ + if (ReferenceEquals(a, b)){ + return true; + } + if((Object)a == null || (Object)b == null){ + return false; + } + + return a.Equals(b); + } + + /// + /// Implements the operator !=. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator !=(Oid a, Oid b) + { + return !(a == b); + } + + /// + /// Implements the operator >. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator >(Oid a, Oid b) + { + return a.CompareTo(b) > 0; + } + + /// + /// Implements the operator <. + /// + /// A. + /// The b. + /// The result of the operator. + public static bool operator <(Oid a, Oid b) + { + return a.CompareTo(b) < 0; + } + + /// + /// Validates the hex. + /// + /// The value. + private void ValidateHex(string value) + { + if(value == null || value.Length != 24) + throw new ArgumentException("Oid strings should be 24 characters"); + + var notHexChars = new Regex(@"[^A-Fa-f0-9]", RegexOptions.None); + if(notHexChars.IsMatch(value)) + throw new ArgumentOutOfRangeException("value", "Value contains invalid characters"); + } + + /// + /// Decodes the hex. + /// + /// The value. + /// + private static byte[] DecodeHex(string value) + { + var numberChars = value.Length; + + var bytes = new byte[numberChars/2]; + for(var i = 0; i < numberChars; i += 2) + try + { + bytes[i/2] = Convert.ToByte(value.Substring(i, 2), 16); + } + catch + { + //failed to convert these 2 chars, they may contain illegal charracters + bytes[i/2] = 0; + } + return bytes; + } + + /// + /// Parses the bytes. + /// + /// The value. + private void ParseBytes(string value) + { + value = value.Replace("\"", ""); + ValidateHex(value); + _bytes = DecodeHex(value); + } + + /// + /// This method is reserved and should not be used. When implementing the IXmlSerializable interface, you should return null (Nothing in Visual Basic) from this method, and instead, if specifying a custom schema is required, apply the to the class. + /// + /// + /// An that describes the XML representation of the object that is produced by the method and consumed by the method. + /// + XmlSchema IXmlSerializable.GetSchema() + { + return null; + } + + /// + /// Generates an object from its XML representation. + /// + /// The stream from which the object is deserialized. + void IXmlSerializable.ReadXml(XmlReader reader) + { + ParseBytes(reader.ReadElementContentAsString()); + } + + /// + /// Converts an object into its XML representation. + /// + /// The stream to which the object is serialized. + void IXmlSerializable.WriteXml(XmlWriter writer) + { + writer.WriteString(ToString()); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Op.cs b/source/MongoDB/Op.cs new file mode 100644 index 00000000..30d9f435 --- /dev/null +++ b/source/MongoDB/Op.cs @@ -0,0 +1,189 @@ +using System; +using MongoDB.Bson; + +namespace MongoDB +{ + /// + /// Staticly typed way of using MongoDB query operators. + /// + public class Op : Document + { + /// + /// Initializes a new instance of the class. + /// + /// Only allow instantiation through static methods. + private Op() + { } + + /// + /// Matches an object which is greater than the specified value. + /// + /// + /// The value. + /// + public static Op GreaterThan(T value) + { + return (Op)new Op().Add("$gt", value); + } + + /// + /// Matches an object which is greater than or equal to the specified value. + /// + /// + /// The value. 
+ /// + public static Op GreaterThanOrEqual(T value) + { + return (Op)new Op().Add("$gte", value); + } + + /// + /// Matches an object which is less than the specified value. + /// + /// + /// The value. + /// + public static Op LessThan(T value) + { + return (Op)new Op().Add("$lt", value); + } + + /// + /// Matches an object which is less than or equal to the specified value. + /// + /// + /// The value. + /// + public static Op LessThanOrEqual(T value) + { + return (Op)new Op().Add("$lte", value); + } + + /// + /// Matches an object which does not equal the specified value. + /// + /// + /// The value. + /// + public static Op NotEqual(T value) + { + return (Op)new Op().Add("$ne", value); + } + + /// + /// Matches an array which has one of the specified values. + /// + /// + /// The values. + /// + public static Op In(params T[] values) + { + return (Op)new Op().Add("$in", values); + } + + /// + /// Matches an array which does not have any of the specified values. + /// + /// + /// The values. + /// + public static Op NotIn(params T[] values) + { + return (Op)new Op().Add("$nin", values); + } + + /// + /// Matches an array which has all of the specified values. + /// + /// + /// The values. + /// + public static Op All(params T[] values) + { + return (Op)new Op().Add("$all", values); + } + + /// + /// Modulus operator. + /// + /// The denominator. + /// The result. + /// + public static Op Mod(int denominator, int result) + { + return (Op)new Op().Add("$mod", new[] { denominator, result }); + } + + /// + /// Matches any array with the specified number of elements + /// + /// The size. + /// + public static Op Size(int size) + { + return (Op)new Op().Add("$size", size); + } + + /// + /// Check for existence of a field. + /// + /// + public static Op Exists() + { + return (Op)new Op().Add("$exists", true); + } + + /// + /// Check for lack of existence of a field. + /// + /// + public static Op NotExists() + { + return (Op)new Op().Add("$exists", false); + } + + /// + /// Matches values based on their bson type. + /// + /// Type of the bson. + /// + public static Op Type(BsonType bsonType) + { + return (Op)new Op().Add("$type", (int)bsonType); + } + + /// + /// Sends the Javascript expressiosn to the server. + /// + /// The javascript. + /// + public static Op Where(string javascript) + { + if(javascript == null) + throw new ArgumentNullException("javascript"); + + return (Op)new Op().Add("$where", new Code(javascript)); + } + + /// + /// Implements the operator &. This is used for conjunctions. + /// + /// The op1. + /// The op2. + /// The result of the operator. + public static Op operator &(Op op1, Op op2) + { + return (Op)new Op().Merge(op1).Merge(op2); + } + + /// + /// Implements the operator !. This is used for the meta operator $not. + /// + /// The op. + /// The result of the operator. + public static Op operator !(Op op) + { + return (Op)new Op().Add("$not", op); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Protocol/DeleteMessage.cs b/source/MongoDB/Protocol/DeleteMessage.cs new file mode 100644 index 00000000..d734387d --- /dev/null +++ b/source/MongoDB/Protocol/DeleteMessage.cs @@ -0,0 +1,63 @@ +using MongoDB.Bson; + +namespace MongoDB.Protocol +{ + /// + /// The OP_DELETE message is used to remove one or more messages from a collection. 
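The Op helpers above compose into ordinary query documents; a short sketch of the intended usage (field names are placeholders):

// Age between 21 and 65, status A or B, and an even number of visits.
var selector = new Document()
    .Add("Age", Op.GreaterThanOrEqual(21) & Op.LessThan(65))
    .Add("Status", Op.In("A", "B"))
    .Add("Visits", Op.Mod(2, 0));

// The ! operator wraps the condition in $not.
var notAdult = new Document().Add("Age", !Op.GreaterThan(18));
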
+ /// + /// + /// struct { + /// MsgHeader header; // standard message header + /// int32 ZERO; // 0 - reserved for future use + /// cstring fullCollectionName; // "dbname.collectionname" + /// int32 ZERO; // 0 - reserved for future use + /// BSON selector; // query object. See below for details. + /// } + /// + internal class DeleteMessage : RequestMessageBase + { + /// + /// Initializes a new instance of the class. + /// + /// The bson writer settings. + public DeleteMessage(BsonWriterSettings bsonWriterSettings) + : base(bsonWriterSettings){ + Header = new MessageHeader(OpCode.Delete); + } + + /// + /// Gets or sets the full name of the collection. + /// + /// The full name of the collection. + public string FullCollectionName { get; set; } + + /// + /// Gets or sets the selector. + /// + /// The selector. + public object Selector { get; set; } + + /// + /// Writes the body. + /// + /// The writer. + protected override void WriteBody(BsonWriter writer){ + writer.WriteValue(BsonType.Integer, 0); + writer.Write(FullCollectionName, false); + writer.WriteValue(BsonType.Integer, 0); + writer.WriteObject(Selector); + } + + /// + /// Calculates the size of the body. + /// + /// The writer. + /// + protected override int CalculateBodySize(BsonWriter writer){ + var size = 8; //first int32, second int32 + size += writer.CalculateSize(FullCollectionName, false); + size += writer.CalculateSizeObject(Selector); + return size; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Protocol/GetMoreMessage.cs b/source/MongoDB/Protocol/GetMoreMessage.cs new file mode 100644 index 00000000..eea3fc36 --- /dev/null +++ b/source/MongoDB/Protocol/GetMoreMessage.cs @@ -0,0 +1,85 @@ +using MongoDB.Bson; + +namespace MongoDB.Protocol +{ + /// + /// The OP_GETMORE message is used to query the database for documents in a collection. + /// + /// + /// struct { + /// MsgHeader header; // standard message header + /// int32 ZERO; // 0 - reserved for future use + /// cstring fullCollectionName; // "dbname.collectionname" + /// int32 numberToReturn; // number of documents to return + /// int64 cursorID; // cursorID from the OP_REPLY + /// } + /// + internal class GetMoreMessage : RequestMessageBase + { + /// + /// Initializes a new instance of the class. + /// + /// Full name of the collection. + /// The cursor id. + public GetMoreMessage(string fullCollectionName, long cursorId) + : this(fullCollectionName, cursorId, 0) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// Full name of the collection. + /// The cursor id. + /// The number to return. + public GetMoreMessage(string fullCollectionName, long cursorId, int numberToReturn) + : base(new BsonWriterSettings()) + { + Header = new MessageHeader(OpCode.GetMore); + FullCollectionName = fullCollectionName; + CursorId = cursorId; + NumberToReturn = numberToReturn; + } + + /// + /// cursorID from the OP_REPLY. + /// + /// The cursor id. + public long CursorId { get; set; } + + /// + /// Gets or sets the full name of the collection. + /// + /// The full name of the collection. + public string FullCollectionName { get; set; } + + /// + /// Gets or sets the number to return. + /// + /// The number to return. + public int NumberToReturn { get; set; } + + /// + /// Writes the body. + /// + /// The writer. 
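This message and OP_GETMORE below are both framed by RequestMessageBase.Write, which emits the 16-byte header followed by the body laid out in the struct remarks. A hedged sketch using DeleteMessage (these classes are internal, so this only illustrates the framing):

var remove = new DeleteMessage(new BsonWriterSettings())
{
    FullCollectionName = "blog.posts",
    Selector = new Document().Add("Draft", true)
};

using (var buffer = new MemoryStream())
{
    // header (16 bytes) + int32 ZERO + cstring "blog.posts" + int32 ZERO + BSON selector
    remove.Write(buffer);
}
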
+ protected override void WriteBody(BsonWriter writer){ + writer.WriteValue(BsonType.Integer, 0); + writer.Write(FullCollectionName, false); + writer.WriteValue(BsonType.Integer, NumberToReturn); + writer.WriteValue(BsonType.Long, CursorId); + } + + /// + /// Calculates the size of the body. + /// + /// The writer. + /// + protected override int CalculateBodySize(BsonWriter writer){ + var size = 4; //first int32 + size += writer.CalculateSize(FullCollectionName, false); + size += 12; //number to return + cursorid + return size; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Protocol/IRequestMessage.cs b/source/MongoDB/Protocol/IRequestMessage.cs new file mode 100644 index 00000000..d944afa6 --- /dev/null +++ b/source/MongoDB/Protocol/IRequestMessage.cs @@ -0,0 +1,16 @@ +using System.IO; + +namespace MongoDB.Protocol +{ + /// + /// A Message that is to be written to the database. + /// + internal interface IRequestMessage + { + /// + /// Writes the specified stream. + /// + /// The stream. + void Write (Stream stream); + } +} diff --git a/source/MongoDB/Protocol/InsertMessage.cs b/source/MongoDB/Protocol/InsertMessage.cs new file mode 100644 index 00000000..241f8dc1 --- /dev/null +++ b/source/MongoDB/Protocol/InsertMessage.cs @@ -0,0 +1,147 @@ +using System; +using System.Collections.Generic; +using System.IO; +using MongoDB.Bson; + +namespace MongoDB.Protocol +{ + /// + /// The OP_INSERT message is used to insert one or more documents into a collection. + /// + /// + /// struct { + /// MsgHeader header; // standard message header + /// int32 ZERO; // 0 - reserved for future use + /// cstring fullCollectionName; // "dbname.collectionname" + /// BSON[] documents; // one or more documents to insert into the collection + /// } + /// + internal class InsertMessage : MessageBase, IRequestMessage + { + private readonly BsonWriterSettings _bsonWriterSettings; + private readonly List _chunks = new List(); + + /// + /// Initializes a new instance of the class. + /// + public InsertMessage(BsonWriterSettings bsonWriterSettings) + { + if(bsonWriterSettings == null) + throw new ArgumentNullException("bsonWriterSettings"); + + _bsonWriterSettings = bsonWriterSettings; + Header = new MessageHeader(OpCode.Insert); + } + + /// + /// Gets or sets the full name of the collection. + /// + /// The full name of the collection. + public string FullCollectionName { get; set; } + + /// + /// Gets or sets the documents. + /// + /// The documents. + public object[] Documents { get; set; } + + /// + /// Writes the specified stream. + /// + /// The stream. + public void Write(Stream stream){ + var bstream = new BufferedStream(stream); + var bwriter = new BsonWriter(bstream, _bsonWriterSettings); + + ChunkMessage(bwriter); + + foreach(var chunk in _chunks) + WriteChunk(bstream, chunk); + } + + /// + /// Breaks down an insert message that may be too large into managable sizes. + /// When inserting only one document there will be only one chunk. However chances + /// are that when inserting thousands of documents at once there will be many. 
+ /// + protected void ChunkMessage(BsonWriter writer){ + var baseSize = CalculateBaseSize(writer); + + var chunk = new MessageChunk{Size = baseSize, Documents = new List()}; + + foreach(var document in Documents){ + var documentSize = writer.CalculateSize(document); + + if(documentSize + baseSize >= MaximumMessageSize) + throw new MongoException("Document is too big to fit in a message."); + + if(documentSize + chunk.Size > MaximumMessageSize){ + _chunks.Add(chunk); + chunk = new MessageChunk{Size = baseSize, Documents = new List()}; + } + + chunk.Documents.Add(document); + chunk.Size += documentSize; + } + + _chunks.Add(chunk); + } + + /// + /// The base size that all chunks will have. + /// + protected int CalculateBaseSize(BsonWriter writer){ + var size = 4; //first int32 + size += writer.CalculateSize(FullCollectionName, false); + size += Header.MessageLength; + return size; + } + + /// + /// Writes out a header and the chunk of documents. + /// + /// + /// + protected void WriteChunk(Stream stream, MessageChunk chunk){ + WriteHeader(new BinaryWriter(stream), chunk.Size); + + var writer = new BsonWriter(stream, _bsonWriterSettings); + writer.WriteValue(BsonType.Integer, 0); + writer.Write(FullCollectionName, false); + + foreach(var document in chunk.Documents) + writer.WriteObject(document); + + writer.Flush(); + } + + /// + /// Writes the header. + /// + /// The writer. + /// Size of the MSG. + protected void WriteHeader(BinaryWriter writer, int messageSize){ + var header = Header; + writer.Write(messageSize); + writer.Write(header.RequestId); + writer.Write(header.ResponseTo); + writer.Write((int)header.OpCode); + writer.Flush(); + } + + /// + /// + /// + protected struct MessageChunk + { + /// + /// + /// + public List Documents; + /// + /// + /// + public int Size; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Protocol/KillCursorsMessage.cs b/source/MongoDB/Protocol/KillCursorsMessage.cs new file mode 100644 index 00000000..2a8214d9 --- /dev/null +++ b/source/MongoDB/Protocol/KillCursorsMessage.cs @@ -0,0 +1,75 @@ +using MongoDB.Bson; + +namespace MongoDB.Protocol +{ + /// + /// The OP_KILL_CURSORS message is used to close an active + /// cursor in the database. This is necessary to ensure + /// that database resources are reclaimed at the end of the query. + /// + /// + /// struct { + /// MsgHeader header; // standard message header + /// int32 ZERO; // 0 - reserved for future use + /// int32 numberOfCursorIDs; // number of cursorIDs in message + /// int64[] cursorIDs; // array of cursorIDs to close + /// } + /// + internal class KillCursorsMessage : RequestMessageBase + { + /// + /// Initializes a new instance of the class. + /// + public KillCursorsMessage() + :base(new BsonWriterSettings()){ + Header = new MessageHeader(OpCode.KillCursors); + } + + /// + /// Initializes a new instance of the class. + /// + /// The cursor id. + public KillCursorsMessage(long cursorId) : this() + { + CursorIds = new[]{cursorId}; + } + + /// + /// Initializes a new instance of the class. + /// + /// The cursor I ds. + public KillCursorsMessage(long[] cursorIDs) : this() + { + CursorIds = cursorIDs; + } + + /// + /// Gets or sets the cursor ids. + /// + /// The cursor ids. + public long[] CursorIds { get; set; } + + /// + /// Writes the body. + /// + /// The writer. 
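The chunking above keeps every emitted OP_INSERT under MaximumMessageSize (4 MB). A sketch of what that means for a large batch (`manyDocuments` is a placeholder array):

var insert = new InsertMessage(new BsonWriterSettings())
{
    FullCollectionName = "analytics.events",
    Documents = manyDocuments            // e.g. tens of thousands of small documents
};

using (var stream = new MemoryStream())
{
    // Write() first calls ChunkMessage, then emits one complete
    // header + ZERO + cstring + BSON[] message per chunk, so no single
    // message exceeds 4 MB; a lone document over the limit throws MongoException.
    insert.Write(stream);
}
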
+ protected override void WriteBody(BsonWriter writer){ + writer.WriteValue(BsonType.Integer, 0); + writer.WriteValue(BsonType.Integer, CursorIds.Length); + + foreach(var id in CursorIds) + writer.WriteValue(BsonType.Long, id); + } + + /// + /// Calculates the size of the body. + /// + /// The writer. + /// + protected override int CalculateBodySize(BsonWriter writer){ + var size = 8; //first int32, number of cursors + size += (CursorIds.Length*8); + return size; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Protocol/MessageBase.cs b/source/MongoDB/Protocol/MessageBase.cs new file mode 100644 index 00000000..dfdc8c28 --- /dev/null +++ b/source/MongoDB/Protocol/MessageBase.cs @@ -0,0 +1,19 @@ +namespace MongoDB.Protocol +{ + /// + /// Base class for all raw messages + /// + internal abstract class MessageBase + { + /// + /// + /// + protected const int MaximumMessageSize = 1024 * 1024 * 4; + + /// + /// Gets or sets the header. + /// + /// The header. + public MessageHeader Header { get; protected set; } + } +} diff --git a/source/MongoDB/Protocol/MessageHeader.cs b/source/MongoDB/Protocol/MessageHeader.cs new file mode 100644 index 00000000..55c2b576 --- /dev/null +++ b/source/MongoDB/Protocol/MessageHeader.cs @@ -0,0 +1,63 @@ +using System; +using System.Diagnostics.CodeAnalysis; + +namespace MongoDB.Protocol +{ + /// + /// In general, each Message consists of a standard message header followed by request-specific data. + /// + /// + /// struct { + /// int32 messageLength; // total size of the message, including the 4 bytes of length + /// int32 requestID; // client or database-generated identifier for this message + /// int32 responseTo; // requestID from the original request (used in reponses from db) + /// int32 opCode; // request type - see table below + /// } + /// + internal class MessageHeader + { + /// + /// Initializes a new instance of the class. + /// + /// The op code. + public MessageHeader(OpCode opCode){ + OpCode = opCode; + MessageLength = 16; //The starting size of any message. + } + + /// + /// Total size of the message, including the 4 bytes of length. + /// + /// The length of the message. + public int MessageLength { get; set; } + + /// + /// Client or database-generated identifier for this message. + /// + /// The request id. + public int RequestId { get; set; } + + /// + /// RequestID from the original request (used in reponses from db). + /// + /// The response to. + public int ResponseTo { get; set; } + + /// + /// Request type + /// + /// The op code. + [SuppressMessage("Microsoft.Naming", "CA1706:ShortAcronymsShouldBeUppercase", MessageId = "Member")] + public OpCode OpCode { get; set; } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override String ToString(){ + return "length:" + MessageLength + " requestId:" + RequestId + " responseTo:" + ResponseTo + " opCode:" + OpCode; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Protocol/MsgMessage.cs b/source/MongoDB/Protocol/MsgMessage.cs new file mode 100644 index 00000000..391e48a7 --- /dev/null +++ b/source/MongoDB/Protocol/MsgMessage.cs @@ -0,0 +1,48 @@ +using MongoDB.Bson; + +namespace MongoDB.Protocol +{ + /// + /// Deprecated. OP_MSG sends a diagnostic message to the database. + /// The database sends back a fixed resonse. 
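Every request and reply starts with the 16-byte header described above; writing one by hand shows the layout (MessageHeader is internal, so this is only an illustration):

var header = new MessageHeader(OpCode.Query) { RequestId = 42 };   // MessageLength starts at 16

using (var stream = new MemoryStream())
using (var writer = new BinaryWriter(stream))
{
    writer.Write(header.MessageLength);   // total size, including these 16 bytes
    writer.Write(header.RequestId);
    writer.Write(header.ResponseTo);      // 0 for requests
    writer.Write((int)header.OpCode);     // 2004 = OP_QUERY
    // the stream now holds exactly 16 bytes for a body-less message
}
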
+ /// + /// + /// struct { + /// MsgHeader header; // standard message header + /// cstring message; // message for the database + /// } + /// + internal class MsgMessage : RequestMessageBase + { + /// + /// Initializes a new instance of the class. + /// + public MsgMessage() + : base(new BsonWriterSettings()){ + Header = new MessageHeader(OpCode.Msg); + } + + /// + /// Gets or sets the message. + /// + /// The message. + public string Message { get; set; } + + /// + /// Writes the body. + /// + /// The writer. + protected override void WriteBody(BsonWriter writer){ + writer.Write(Message, false); + } + + /// + /// Calculates the size of the body. + /// + /// The writer. + /// + protected override int CalculateBodySize(BsonWriter writer){ + return writer.CalculateSize(Message, false); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Protocol/OpCode.cs b/source/MongoDB/Protocol/OpCode.cs new file mode 100644 index 00000000..a83bebf9 --- /dev/null +++ b/source/MongoDB/Protocol/OpCode.cs @@ -0,0 +1,44 @@ +namespace MongoDB.Protocol +{ + /// + /// + internal enum OpCode + { + /// + /// Reply to a client request. responseTo is set. + /// + Reply = 1, + /// + /// Generic msg command followed by a string. + /// + Msg = 1000, + /// + /// update document + /// + Update = 2001, + /// + /// insert new document + /// + Insert = 2002, + /// + /// is this used? + /// + GetByOid = 2003, + /// + /// query a collection + /// + Query = 2004, + /// + /// Get more data from a query. See Cursors. + /// + GetMore = 2005, + /// + /// Delete documents + /// + Delete = 2006, + /// + /// Tell database client is done with a cursor. + /// + KillCursors = 2007 + } +} \ No newline at end of file diff --git a/source/MongoDB/Protocol/QueryMessage.cs b/source/MongoDB/Protocol/QueryMessage.cs new file mode 100644 index 00000000..25ffb433 --- /dev/null +++ b/source/MongoDB/Protocol/QueryMessage.cs @@ -0,0 +1,143 @@ +using System; +using MongoDB.Bson; + +namespace MongoDB.Protocol +{ + /// + /// The OP_QUERY message is used to query the database for documents in a collection. + /// + /// + /// struct { + /// MsgHeader header; // standard message header + /// int32 opts; // query options. See below for details. + /// cstring fullCollectionName; // "dbname.collectionname" + /// int32 numberToSkip; // number of documents to skip when returning results + /// int32 numberToReturn; // number of documents to return in the first OP_REPLY + /// BSON query ; // query object. See below for details. + /// [ BSON returnFieldSelector; ] // OPTIONAL : selector indicating the fields to return. See below for details. + /// } + /// + internal class QueryMessage : RequestMessageBase + { + /// + /// Initializes a new instance of the class. + /// + /// The bson writer settings. + public QueryMessage(BsonWriterSettings bsonWriterSettings) + : base(bsonWriterSettings){ + Header = new MessageHeader(OpCode.Query); + } + + /// + /// Initializes a new instance of the class. + /// + /// The bson writer settings. + /// The query. + /// Full name of the collection. + public QueryMessage(BsonWriterSettings bsonWriterSettings, object query, String fullCollectionName) + : this(bsonWriterSettings, query, fullCollectionName, 0, 0){ + } + + /// + /// Initializes a new instance of the class. + /// + /// The bson writer settings. + /// The query. + /// Full name of the collection. + /// The number to return. + /// The number to skip. 
+ public QueryMessage(BsonWriterSettings bsonWriterSettings, object query, String fullCollectionName, Int32 numberToReturn, Int32 numberToSkip) + : this(bsonWriterSettings, query, fullCollectionName, numberToReturn, numberToSkip, null) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The bson writer settings. + /// The query. + /// Full name of the collection. + /// The number to return. + /// The number to skip. + /// The return field selector. + public QueryMessage(BsonWriterSettings bsonWriterSettings, + object query, + String fullCollectionName, + Int32 numberToReturn, + Int32 numberToSkip, + object returnFieldSelector) + : base(bsonWriterSettings) + { + Header = new MessageHeader(OpCode.Query); + Query = query; + FullCollectionName = fullCollectionName; + NumberToReturn = numberToReturn; + NumberToSkip = numberToSkip; + ReturnFieldSelector = returnFieldSelector; + } + + /// + /// Gets or sets the options. + /// + /// The options. + public QueryOptions Options { get; set; } + + /// + /// Gets or sets the full name of the collection. + /// + /// The full name of the collection. + public string FullCollectionName { get; set; } + + /// + /// Gets or sets the number to skip. + /// + /// The number to skip. + public int NumberToSkip { get; set; } + + /// + /// Gets or sets the number to return. + /// + /// The number to return. + public int NumberToReturn { get; set; } + + /// + /// Gets or sets the query. + /// + /// The query. + public object Query { get; set; } + + /// + /// Gets or sets the return field selector. + /// + /// The return field selector. + public object ReturnFieldSelector { get; set; } + + /// + /// Writes the body. + /// + /// The writer. + protected override void WriteBody(BsonWriter writer){ + writer.WriteValue(BsonType.Integer, (int)Options); + writer.Write(FullCollectionName, false); + writer.WriteValue(BsonType.Integer, NumberToSkip); + writer.WriteValue(BsonType.Integer, NumberToReturn); + writer.WriteObject(Query); + if(ReturnFieldSelector != null) + writer.WriteObject(ReturnFieldSelector); + } + + /// + /// Calculates the size of the body. + /// + /// The writer. + /// + protected override int CalculateBodySize(BsonWriter writer){ + var size = 12; //options, numbertoskip, numbertoreturn + size += writer.CalculateSize(FullCollectionName, false); + size += writer.CalculateSizeObject(Query); + if(ReturnFieldSelector != null) + size += writer.CalculateSizeObject(ReturnFieldSelector); + return size; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Protocol/ReplyMessage.cs b/source/MongoDB/Protocol/ReplyMessage.cs new file mode 100644 index 00000000..c7207b3d --- /dev/null +++ b/source/MongoDB/Protocol/ReplyMessage.cs @@ -0,0 +1,153 @@ +using System; +using System.Collections.Generic; +using System.IO; +using MongoDB.Bson; + +namespace MongoDB.Protocol +{ + /// + /// The OP_REPLY message is sent by the database in response to an CONTRIB:OP_QUERY or CONTRIB:OP_GET_MORE message. 
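A query is assembled from the pieces above before being written to the connection; a sketch using the five-argument constructor (collection and field names are placeholders):

var query = new QueryMessage(
    new BsonWriterSettings(),
    new Document().Add("Status", "A"),   // the BSON query object
    "blog.posts",                        // dbname.collectionname
    10,                                  // numberToReturn
    20)                                  // numberToSkip
{
    Options = QueryOptions.NoCursorTimeout,
    ReturnFieldSelector = new Document().Add("Title", 1)   // optional projection
};
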
+ /// + /// + /// + /// struct { + /// MsgHeader header; // standard message header + /// int32 responseFlag; // normally zero, non-zero on query failure + /// int64 cursorID; // id of the cursor created for this query response + /// int32 startingFrom; // indicates where in the cursor this reply is starting + /// int32 numberReturned; // number of documents in the reply + /// BSON[] documents; // documents + /// } + /// + internal class ReplyMessage : MessageBase where T : class + { + private readonly BsonReaderSettings _readerSettings; + + /// + /// Initializes a new instance of the class. + /// + /// The reader settings. + public ReplyMessage(BsonReaderSettings readerSettings){ + if(readerSettings == null) + throw new ArgumentNullException("readerSettings"); + _readerSettings = readerSettings; + } + + /// + /// normally zero, non-zero on query failure + /// + /// The response flag. + public int ResponseFlag { get; set; } + + /// + /// id of the cursor created for this query response + /// + /// The cursor id. + public long CursorId { get; set; } + + /// + /// indicates where in the cursor this reply is starting + /// + /// The starting from. + public int StartingFrom { get; set; } + + /// + /// number of documents in the reply + /// + /// The number returned. + public int NumberReturned { get; set; } + + /// + /// Gets or sets the documents. + /// + /// The documents. + public T[] Documents { get; set; } + + /// + /// Reads the specified stream. + /// + /// The stream. + public void Read(Stream stream){ + stream = new BufferedStream(stream, 256); + var reader = new BinaryReader(stream); + Header = ReadHeader(reader); + ResponseFlag = reader.ReadInt32(); + CursorId = reader.ReadInt64(); + StartingFrom = reader.ReadInt32(); + NumberReturned = reader.ReadInt32(); + + var breader = new BsonReader(stream, _readerSettings); + var documents = new List(); + + for(var num = 0; num < NumberReturned; num++) + documents.Add((T)breader.ReadObject()); + + Documents = documents.ToArray(); + } + + /// + /// Reads the header. + /// + /// The reader. + /// + protected MessageHeader ReadHeader(BinaryReader reader){ + var header = new MessageHeader(OpCode.Reply){ + MessageLength = reader.ReadInt32(), + RequestId = reader.ReadInt32(), + ResponseTo = reader.ReadInt32() + }; + + var opCode = reader.ReadInt32(); + + if((OpCode)opCode != OpCode.Reply) + throw new InvalidDataException("Should have been a reply but wasn't"); + + return header; + } + + // public void Read(Stream stream){ + // /* Used during debugging of the stream. + // BsonReader headerreader = new BsonReader(stream); + // this.Header = ReadHeader(headerreader); + // + // //buffer the whole response into a memorystream for debugging. 
+ // MemoryStream buffer = new MemoryStream(); + // BinaryReader buffReader = new BinaryReader(stream); + // BinaryWriter buffWriter = new BinaryWriter(buffer); + // byte[] body = buffReader.ReadBytes(this.Header.MessageLength - 16); + // System.Console.WriteLine(BitConverter.ToString(body)); + // buffWriter.Write(body); + // buffer.Seek(0, SeekOrigin.Begin); + // + // BsonReader reader = new BsonReader(buffer);*/ + // + // //BsonReader reader = new BsonReader(stream); + // //BsonReader reader = new BsonReader(new BufferedStream(stream)); + // BsonReader reader = new BsonReader(new BufferedStream(stream, 4 * 1024)); + // this.Header = ReadHeader(reader); + // + // this.ResponseFlag = reader.ReadInt32(); + // this.CursorID = reader.ReadInt64(); + // this.StartingFrom = reader.ReadInt32(); + // this.NumberReturned = reader.ReadInt32(); + // + // List docs = new List(); + // for(int num = 0; num < this.NumberReturned; num++){ + // BsonDocument doc = new BsonDocument(); + // doc.Read(reader); + // docs.Add(doc); + // } + // this.Documents = docs.ToArray(); + // } + // + // protected MessageHeader ReadHeader(BsonReader reader){ + // MessageHeader hdr = new MessageHeader(OpCode.Reply); + // hdr.MessageLength = reader.ReadInt32(); + // hdr.RequestId = reader.ReadInt32(); + // hdr.ResponseTo = reader.ReadInt32(); + // int op = reader.ReadInt32(); + // if((OpCode)op != OpCode.Reply) throw new InvalidDataException("Should have been a reply but wasn't"); + // return hdr; + // } + } +} \ No newline at end of file diff --git a/source/MongoDB/Protocol/RequestMessageBase.cs b/source/MongoDB/Protocol/RequestMessageBase.cs new file mode 100644 index 00000000..c557202d --- /dev/null +++ b/source/MongoDB/Protocol/RequestMessageBase.cs @@ -0,0 +1,61 @@ +using System; +using System.IO; +using MongoDB.Bson; + +namespace MongoDB.Protocol +{ + /// + /// Description of Message. + /// + internal abstract class RequestMessageBase : MessageBase, IRequestMessage + { + private readonly BsonWriterSettings _bsonWriterSettings; + + /// + /// Initializes a new instance of the class. + /// + /// The bson writer settings. + protected RequestMessageBase(BsonWriterSettings bsonWriterSettings){ + if(bsonWriterSettings == null) + throw new ArgumentNullException("bsonWriterSettings"); + + _bsonWriterSettings = bsonWriterSettings; + } + + /// + /// Writes the specified stream. + /// + /// The stream. + public void Write(Stream stream){ + var header = Header; + var bstream = new BufferedStream(stream); + var writer = new BinaryWriter(bstream); + var bwriter = new BsonWriter(bstream, _bsonWriterSettings); + + Header.MessageLength += CalculateBodySize(bwriter); + if(Header.MessageLength > MaximumMessageSize) + throw new MongoException("Maximum message length exceeded"); + + writer.Write(header.MessageLength); + writer.Write(header.RequestId); + writer.Write(header.ResponseTo); + writer.Write((int)header.OpCode); + writer.Flush(); + WriteBody(bwriter); + bwriter.Flush(); + } + + /// + /// Writes the body. + /// + /// The writer. + protected abstract void WriteBody(BsonWriter writer); + + /// + /// Calculates the size of the body. + /// + /// The writer. 
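RequestMessageBase.Write and ReplyMessage.Read above are the two halves of a round trip. A loose sketch, where `stream` stands in for the connection's network stream and BsonReaderSettings is assumed to have a parameterless constructor:

IRequestMessage request = new QueryMessage(
    new BsonWriterSettings(), new Document(), "blog.posts", 1, 0);
request.Write(stream);                          // header + OP_QUERY body

var reply = new ReplyMessage<Document>(new BsonReaderSettings());
reply.Read(stream);                             // header, responseFlag, cursorID, documents

foreach (var document in reply.Documents)
    Console.WriteLine(document);

if (reply.CursorId != 0)                        // fetch the next batch from the open cursor
    new GetMoreMessage("blog.posts", reply.CursorId, 100).Write(stream);
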
+ /// + protected abstract int CalculateBodySize(BsonWriter writer); + } +} \ No newline at end of file diff --git a/source/MongoDB/Protocol/UpdateMessage.cs b/source/MongoDB/Protocol/UpdateMessage.cs new file mode 100644 index 00000000..7ee9ccb9 --- /dev/null +++ b/source/MongoDB/Protocol/UpdateMessage.cs @@ -0,0 +1,79 @@ +using MongoDB.Bson; + +namespace MongoDB.Protocol +{ + /// + /// The OP_UPDATE message is used to update a document in a collection. + /// + /// + /// struct { + /// MsgHeader header; // standard message header + /// int32 ZERO; // 0 - reserved for future use + /// cstring fullCollectionName; // "dbname.collectionname" + /// int32 flags; // bit vector. see below + /// BSON selector; // the query to select the document + /// BSON document; // the document data to update with or insert + /// } + /// + internal class UpdateMessage : RequestMessageBase + { + /// + /// Initializes a new instance of the class. + /// + /// The bson writer settings. + public UpdateMessage(BsonWriterSettings bsonWriterSettings) + : base(bsonWriterSettings){ + Header = new MessageHeader(OpCode.Update); + } + + /// + /// dbname.collectionname + /// + /// The full name of the collection. + public string FullCollectionName { get; set; } + + /// + /// bit vector + /// + /// The flags. + public int Flags { get; set; } + + /// + /// The query to select the document. + /// + /// The selector. + public object Selector { get; set; } + + /// + /// The document data to update with or insert. + /// + /// The document. + public object Document { get; set; } + + /// + /// Writes the body. + /// + /// The writer. + protected override void WriteBody(BsonWriter writer){ + writer.WriteValue(BsonType.Integer, 0); + writer.Write(FullCollectionName, false); + writer.WriteValue(BsonType.Integer, Flags); + writer.WriteObject(Selector); + writer.WriteObject(Document); + } + + /// + /// Calculates the size of the body. + /// + /// The writer. + /// + protected override int CalculateBodySize(BsonWriter writer){ + var size = 4; //first int32 + size += writer.CalculateSize(FullCollectionName, false); + size += 4; //flags + size += writer.CalculateSizeObject(Selector); + size += writer.CalculateSizeObject(Document); + return size; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/QueryOptions.cs b/source/MongoDB/QueryOptions.cs new file mode 100644 index 00000000..71846d98 --- /dev/null +++ b/source/MongoDB/QueryOptions.cs @@ -0,0 +1,27 @@ +namespace MongoDB +{ + /// + /// Query options + /// + /// + /// Oplog replay: 8 (internal replication use only - drivers should not implement) + /// + public enum QueryOptions { + /// + /// None + /// + None = 0, + /// + /// Tailable cursor + /// + TailableCursor = 2, + /// + /// Slave OK + /// + SlaveOK = 4, + /// + /// No cursor timeout + /// + NoCursorTimeout = 16 + } +} \ No newline at end of file diff --git a/source/MongoDB/Results/CommandResultBase.cs b/source/MongoDB/Results/CommandResultBase.cs new file mode 100644 index 00000000..fca21e4c --- /dev/null +++ b/source/MongoDB/Results/CommandResultBase.cs @@ -0,0 +1,40 @@ +using MongoDB.Attributes; + +namespace MongoDB.Results +{ + /// + /// + /// + public abstract class CommandResultBase + { + /// + /// Initializes a new instance of the class. + /// + protected CommandResultBase(){ + ExtendedProperties = new Document(); + } + + /// + /// Gets or sets the server error message. + /// + /// The error message. 
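The QueryOptions values above are powers of two because the OP_QUERY flags field is a bit vector; individual options are meant to be combined with bitwise OR. A small sketch (the enum is copied from the hunk above, the Main is only illustrative):

    enum QueryOptions
    {
        None = 0,
        TailableCursor = 2,
        SlaveOK = 4,
        NoCursorTimeout = 16
    }

    class QueryOptionsDemo
    {
        static void Main()
        {
            // Options combine into the OP_QUERY flags bit vector with bitwise OR.
            var options = QueryOptions.SlaveOK | QueryOptions.NoCursorTimeout;
            System.Console.WriteLine((int)options);  // 20
        }
    }
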
+ [MongoAlias("errmsg")] + public string ErrorMessage { get; set; } + + /// + /// Gets or sets a value indicating whether this is success. + /// + /// true if success; otherwise, false. + [MongoAlias("ok")] + public bool Success { get; set; } + + /// + /// Gets or sets the extended properties. + /// + /// + /// Stores all propertys which are not maped. + /// + /// The extended properties. + public Document ExtendedProperties { get; set; } + } +} \ No newline at end of file diff --git a/source/MongoDB/Results/FindAndModifyResult.cs b/source/MongoDB/Results/FindAndModifyResult.cs new file mode 100644 index 00000000..eb944b6b --- /dev/null +++ b/source/MongoDB/Results/FindAndModifyResult.cs @@ -0,0 +1,14 @@ +using MongoDB.Attributes; + +namespace MongoDB.Results +{ + internal class FindAndModifyResult : CommandResultBase + { + /// + /// Gets or sets the value. + /// + /// The value. + [MongoAlias("value")] + public T Value { get; set; } + } +} \ No newline at end of file diff --git a/source/MongoDB/Results/MapReduceResult.cs b/source/MongoDB/Results/MapReduceResult.cs new file mode 100644 index 00000000..ceba4caf --- /dev/null +++ b/source/MongoDB/Results/MapReduceResult.cs @@ -0,0 +1,119 @@ +using System; + +namespace MongoDB.Results +{ + /// + /// + public class MapReduceResult + { + private readonly Document _counts; + private readonly Document _result; + private TimeSpan _timeSpan = TimeSpan.Zero; + + /// + /// Initializes a new instance of the class. + /// + /// The result. + public MapReduceResult(Document result) + { + if(result == null) + throw new ArgumentNullException("result"); + + _result = result; + _counts = (Document)result["counts"]; + } + + /// + /// Gets the name of the collection. + /// + /// The name of the collection. + public string CollectionName + { + get { return (string)_result["result"]; } + } + + /// + /// Gets the input count. + /// + /// The input count. + public long InputCount + { + get { return Convert.ToInt64(_counts["input"]); } + } + + /// + /// Gets the emit count. + /// + /// The emit count. + public long EmitCount + { + get { return Convert.ToInt64(_counts["emit"]); } + } + + /// + /// Gets the output count. + /// + /// The output count. + public long OutputCount + { + get { return Convert.ToInt64(_counts["output"]); } + } + + /// + /// Gets the time. + /// + /// The time. + public long Time + { + get { return Convert.ToInt64(_result["timeMillis"]); } + } + + /// + /// Gets the time span. + /// + /// The time span. + public TimeSpan TimeSpan + { + get + { + if(_timeSpan == TimeSpan.Zero) + _timeSpan = TimeSpan.FromMilliseconds(Time); + return _timeSpan; + } + } + + /// + /// Gets a value indicating whether this is ok. + /// + /// true if ok; otherwise, false. + public Boolean Ok + { + get { return (Convert.ToBoolean(_result["ok"])); } + } + + /// + /// Gets the error message. + /// + /// The error message. + public String ErrorMessage + { + get + { + if(_result.ContainsKey("msg")) + return (String)_result["msg"]; + return String.Empty; + } + } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. 
+ /// + public override string ToString() + { + return _result.ToString(); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/BsonClassMapBuilder.cs b/source/MongoDB/Serialization/BsonClassMapBuilder.cs new file mode 100644 index 00000000..38f3d162 --- /dev/null +++ b/source/MongoDB/Serialization/BsonClassMapBuilder.cs @@ -0,0 +1,76 @@ +using System; +using System.Collections.Generic; +using MongoDB.Bson; +using MongoDB.Configuration.Mapping; +using MongoDB.Serialization.Builders; + +namespace MongoDB.Serialization +{ + internal class BsonClassMapBuilder : IBsonObjectBuilder + { + private bool _isDictionary; + private readonly Stack _types; + private readonly IMappingStore _mappingStore; + + public BsonClassMapBuilder(IMappingStore mappingStore, Type classType) + { + _mappingStore = mappingStore; + _types = new Stack(); + _types.Push(classType); + } + + public object BeginObject() + { + if (_isDictionary) + { + _isDictionary = false; + return new DictionaryBuilder(_types.Peek()); + } + + if (_types.Peek() == null || _types.Peek() == typeof(Document)) + return new DocumentBuilder(); + + var classMap = _mappingStore.GetClassMap(_types.Peek()); + if (classMap.IsPolymorphic) + { + //until we have the discriminator, we can't instantiate the type. + return new PolymorphicClassMapBuilder(classMap); + } + + return new ConcreteClassMapBuilder(classMap); + } + + public object EndObject(object instance) + { + return ((IObjectBuilder)instance).BuildObject(); + } + + public object BeginArray() + { + return new ArrayBuilder(_types.Peek()); + } + + public object EndArray(object instance) + { + return ((IObjectBuilder)instance).BuildObject(); + } + + public void BeginProperty(object instance, string name) + { + var propDescriptor = ((IObjectBuilder)instance).GetPropertyDescriptor(name); + if (propDescriptor == null) + _types.Push(null); + else + { + _types.Push(propDescriptor.Type); + _isDictionary = propDescriptor.IsDictionary; + } + } + + public void EndProperty(object instance, string name, object value) + { + _types.Pop(); + ((IObjectBuilder)instance).AddProperty(name, value); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/BsonClassMapDescriptor.cs b/source/MongoDB/Serialization/BsonClassMapDescriptor.cs new file mode 100644 index 00000000..7e5ff47e --- /dev/null +++ b/source/MongoDB/Serialization/BsonClassMapDescriptor.cs @@ -0,0 +1,123 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using MongoDB.Bson; +using MongoDB.Configuration.Mapping; +using MongoDB.Serialization.Descriptors; + +namespace MongoDB.Serialization +{ + internal class BsonClassMapDescriptor : IBsonObjectDescriptor + { + private readonly Stack _types; + private readonly IMappingStore _mappingStore; + private bool _isDictionaryProperty; + + public BsonClassMapDescriptor(IMappingStore mappingStore, Type rootType) + { + if (mappingStore == null) + throw new ArgumentNullException("mappingStore"); + if (rootType == null) + throw new ArgumentNullException("rootType"); + + _isDictionaryProperty = false; + _mappingStore = mappingStore; + _types = new Stack(); + _types.Push(rootType); + } + + public object BeginObject(object instance) + { + if (instance is Document) + return BeginDocument((Document)instance); + + var currentClassMap = _mappingStore.GetClassMap(_types.Peek()); + var instanceType = instance.GetType(); + if (!currentClassMap.ClassType.IsAssignableFrom(instanceType)) + return new ExampleClassMapPropertyDescriptor(_mappingStore, 
currentClassMap, instance); + + if (currentClassMap.ClassType != instanceType) //we are a subclass + currentClassMap = _mappingStore.GetClassMap(instanceType); + + return new ClassMapPropertyDescriptor(_mappingStore, currentClassMap, instance); + } + + public object BeginArray(object instance) + { + return new ArrayDescriptor((IEnumerable)instance, _types.Peek()); + } + public IEnumerable GetProperties(object instance) + { + return ((IPropertyDescriptor)instance).GetProperties(); + } + + public void BeginProperty(object instance, BsonProperty property) + { + var value = property.Value as BsonPropertyValue; + if (value == null) + return; + + _isDictionaryProperty = value.IsDictionary; + _types.Push(value.Type); + property.Value = value.Value; + } + + public void EndProperty(object instance, BsonProperty property) + { + _types.Pop(); + } + + public void EndArray(object instance) + { } + + public void EndObject(object instance) + { } + + public bool IsArray(object instance) + { + if (instance is Document) + return false; + + return instance is IEnumerable; + } + + public bool IsObject(object instance) + { + return !IsNativeToMongo(instance.GetType()); + } + + private object BeginDocument(Document document) + { + if (_isDictionaryProperty) + { + _isDictionaryProperty = false; + return new DictionaryPropertyDescriptor(document, _types.Peek()); + } + if (typeof(Document).IsAssignableFrom(_types.Peek())) + return new DocumentPropertyDescriptor(document); + + var currentClassMap = _mappingStore.GetClassMap(_types.Peek()); + + return new DocumentClassMapPropertyDescriptor(_mappingStore, currentClassMap, document); + } + + private static bool IsNativeToMongo(Type type) + { + var typeCode = Type.GetTypeCode(type); + + if (typeCode != TypeCode.Object) + return true; + + if (type == typeof(Guid)) + return true; + + if (type == typeof(Oid)) + return true; + + if (type == typeof(byte[])) + return true; + + return false; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Builders/ArrayBuilder.cs b/source/MongoDB/Serialization/Builders/ArrayBuilder.cs new file mode 100644 index 00000000..c30eab8e --- /dev/null +++ b/source/MongoDB/Serialization/Builders/ArrayBuilder.cs @@ -0,0 +1,117 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; + +namespace MongoDB.Serialization.Builders +{ + internal class ArrayBuilder : IObjectBuilder + { + private readonly List _elements; + private readonly Type _elementType; + + /// + /// Initializes a new instance of the class. + /// + /// Type of the element. + public ArrayBuilder(Type elementType) + { + _elements = new List(); + _elementType = elementType; + } + /// + /// Adds the property. + /// + /// The name. + /// The value. + public void AddProperty(string name, object value) + { + _elements.Add(value); + } + + /// + /// Builds the object. + /// + /// + public object BuildObject() + { + if(IsDocumentArray) + return GetTypedList(); + + return _elements.ToArray(); + } + + /// + /// Gets the typed list. + /// + /// + private object GetTypedList() + { + var type = GetResultListType(); + + return type == typeof(object) ? _elements : CreateTypedList(type); + } + + /// + /// Gets the type of the property. + /// + /// The name. + /// + public PropertyDescriptor GetPropertyDescriptor(string name) + { + return new PropertyDescriptor { Type = _elementType }; + } + + /// + /// Gets a value indicating whether this instance is document array. 
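IsNativeToMongo above is the gate that decides whether a value is written as a BSON primitive or descended into as an object. A standalone restatement of the same test (the driver's Oid check is dropped here because Oid is a driver type, so this sketch covers only the BCL cases):

    using System;

    class NativeToMongoSketch
    {
        // Primitives (anything with a non-Object TypeCode), Guid and byte[] are written
        // directly; everything else is treated as an object and recursed into.
        static bool IsNativeToMongo(Type type)
        {
            return Type.GetTypeCode(type) != TypeCode.Object
                   || type == typeof(Guid)
                   || type == typeof(byte[]);
        }

        static void Main()
        {
            Console.WriteLine(IsNativeToMongo(typeof(string)));  // True  (TypeCode.String)
            Console.WriteLine(IsNativeToMongo(typeof(byte[])));  // True
            Console.WriteLine(IsNativeToMongo(typeof(Uri)));     // False (serialized as a document)
        }
    }
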
+ /// + /// + /// true if this instance is document array; otherwise, false. + /// + private bool IsDocumentArray{ + get{ return _elementType == null;} + } + + /// + /// Gets the type of the result list. + /// + /// + private Type GetResultListType() + { + //Todo: compare the inheritance tree up to find the most common + if(_elements.Count == 0) + return typeof(object); + + Type commonType = null; + + foreach(var objType in from obj in _elements + where obj != null + select obj.GetType()) + { + if(commonType == null) + commonType = objType; + else if(commonType != objType) + return typeof(object); + } + + return commonType; + } + + /// + /// Creates the typed list. + /// + /// The type. + /// + private object CreateTypedList(Type type) + { + var listType = typeof(List<>).MakeGenericType(type); + + var list = (IList)Activator.CreateInstance(listType); + + foreach(var obj in _elements) + list.Add(obj); + + return list; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Builders/ConcreteClassMapBuilder.cs b/source/MongoDB/Serialization/Builders/ConcreteClassMapBuilder.cs new file mode 100644 index 00000000..44a95db8 --- /dev/null +++ b/source/MongoDB/Serialization/Builders/ConcreteClassMapBuilder.cs @@ -0,0 +1,62 @@ +using System; + +using MongoDB.Configuration.Mapping.Model; +using System.Collections.Generic; + +namespace MongoDB.Serialization.Builders +{ + internal class ConcreteClassMapBuilder : IObjectBuilder + { + private readonly IClassMap _classMap; + private readonly object _instance; + private readonly IDictionary _extendedProperties; + + public ConcreteClassMapBuilder(IClassMap classMap) + { + _classMap = classMap; + _instance = classMap.CreateInstance(); + + if(!_classMap.HasExtendedProperties) + return; + + var extPropType = _classMap.ExtendedPropertiesMap.MemberReturnType; + if (extPropType == typeof(IDictionary)) + extPropType = typeof(Dictionary); + _extendedProperties = (IDictionary)Activator.CreateInstance(extPropType); + _classMap.ExtendedPropertiesMap.SetValue(_instance, _extendedProperties); + } + + public void AddProperty(string name, object value) + { + var memberMap = _classMap.GetMemberMapFromAlias(name); + if (memberMap != null) + memberMap.SetValue(_instance, value); + else if ((!_classMap.HasDiscriminator || _classMap.DiscriminatorAlias != name) && _extendedProperties != null) + _extendedProperties.Add(name, value); + } + + public object BuildObject() + { + return _instance; + } + + public PropertyDescriptor GetPropertyDescriptor(string name) + { + var memberMap = _classMap.GetMemberMapFromAlias(name); + if (memberMap == null) + return null; + + var type = memberMap.MemberReturnType; + var isDictionary = false; + if (memberMap is CollectionMemberMap) + type = ((CollectionMemberMap)memberMap).ElementType; + else if (memberMap is DictionaryMemberMap) + { + type = ((DictionaryMemberMap)memberMap).ValueType; + isDictionary = true; + } + + return new PropertyDescriptor { Type = type, IsDictionary = isDictionary }; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Builders/DictionaryBuilder.cs b/source/MongoDB/Serialization/Builders/DictionaryBuilder.cs new file mode 100644 index 00000000..a191ad76 --- /dev/null +++ b/source/MongoDB/Serialization/Builders/DictionaryBuilder.cs @@ -0,0 +1,52 @@ +using System; + +namespace MongoDB.Serialization.Builders +{ + /// + /// + /// + public class DictionaryBuilder : IObjectBuilder + { + private readonly Document _document; + private readonly Type _valueType; + + /// + /// 
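ArrayBuilder above only produces a typed List&lt;T&gt; when every element shares one runtime type; otherwise it falls back to the plain object list. A self-contained sketch of the same reflection trick used by GetResultListType and CreateTypedList:

    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Linq;

    class TypedListSketch
    {
        static void Main()
        {
            var elements = new List<object> { 1, 2, 3 };

            // Find the single common runtime type, as GetResultListType does above.
            Type common = elements.Where(e => e != null)
                                  .Select(e => e.GetType())
                                  .Distinct()
                                  .Count() == 1
                ? elements.First(e => e != null).GetType()
                : typeof(object);

            // Build List<common> via reflection, as CreateTypedList does above.
            var list = (IList)Activator.CreateInstance(typeof(List<>).MakeGenericType(common));
            foreach (var e in elements)
                list.Add(e);

            Console.WriteLine(list.GetType());  // System.Collections.Generic.List`1[System.Int32]
        }
    }
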
Initializes a new instance of the class. + /// + /// Type of the value. + public DictionaryBuilder(Type valueType) + { + _document = new Document(); + _valueType = valueType; + } + + /// + /// Adds the property. + /// + /// The name. + /// The value. + public void AddProperty(string name, object value) + { + _document.Add(name, value); + } + + /// + /// Builds the object. + /// + /// + public object BuildObject() + { + return _document; + } + + /// + /// Gets the property descriptor. + /// + /// The name. + /// + public PropertyDescriptor GetPropertyDescriptor(string name) + { + return new PropertyDescriptor { Type = _valueType, IsDictionary = false }; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Builders/DocumentBuilder.cs b/source/MongoDB/Serialization/Builders/DocumentBuilder.cs new file mode 100644 index 00000000..d048f9f8 --- /dev/null +++ b/source/MongoDB/Serialization/Builders/DocumentBuilder.cs @@ -0,0 +1,44 @@ +namespace MongoDB.Serialization.Builders +{ + internal class DocumentBuilder : IObjectBuilder + { + private readonly Document _document; + + /// + /// Initializes a new instance of the class. + /// + public DocumentBuilder() + { + _document = new Document(); + } + + /// + /// Adds the property. + /// + /// The name. + /// The value. + public void AddProperty(string name, object value) + { + _document.Add(name, value); + } + + /// + /// Builds the object. + /// + /// + public object BuildObject() + { + return _document; + } + + /// + /// Gets the type of the property. + /// + /// The name. + /// + public PropertyDescriptor GetPropertyDescriptor(string name) + { + return new PropertyDescriptor(); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Builders/IObjectBuilder.cs b/source/MongoDB/Serialization/Builders/IObjectBuilder.cs new file mode 100644 index 00000000..7f0229a5 --- /dev/null +++ b/source/MongoDB/Serialization/Builders/IObjectBuilder.cs @@ -0,0 +1,11 @@ +namespace MongoDB.Serialization.Builders +{ + internal interface IObjectBuilder + { + void AddProperty(string name, object value); + + object BuildObject(); + + PropertyDescriptor GetPropertyDescriptor(string name); + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Builders/PolymorphicClassMapBuilder.cs b/source/MongoDB/Serialization/Builders/PolymorphicClassMapBuilder.cs new file mode 100644 index 00000000..87807b54 --- /dev/null +++ b/source/MongoDB/Serialization/Builders/PolymorphicClassMapBuilder.cs @@ -0,0 +1,70 @@ +using System; +using System.Collections.Generic; + +using MongoDB.Configuration.Mapping.Model; + +namespace MongoDB.Serialization.Builders +{ + internal class PolymorphicClassMapBuilder : IObjectBuilder + { + private IClassMap _classMap; + private ConcreteClassMapBuilder _concreteEntityBuilder; + private readonly Dictionary _properties; + + public PolymorphicClassMapBuilder(IClassMap classMap) + { + _classMap = classMap; + _properties = new Dictionary(); + } + + public void AddProperty(string name, object value) + { + if (_concreteEntityBuilder != null) + _concreteEntityBuilder.AddProperty(name, value); + else if (_classMap.DiscriminatorAlias == name) + { + //we have found our discriminator and *can* instantiate our type + _classMap = _classMap.GetClassMapFromDiscriminator(value); + _concreteEntityBuilder = new ConcreteClassMapBuilder(_classMap); + foreach (var pair in _properties) + _concreteEntityBuilder.AddProperty(pair.Key, pair.Value); + + _properties.Clear(); + } + else + _properties.Add(name, 
value); + } + + public object BuildObject() + { + if (_concreteEntityBuilder == null) + { + //we'll assume that this is the root class in the hierarchy. + _concreteEntityBuilder = new ConcreteClassMapBuilder(_classMap); + foreach (var pair in _properties) + _concreteEntityBuilder.AddProperty(pair.Key, pair.Value); + } + + return _concreteEntityBuilder.BuildObject(); + } + + public PropertyDescriptor GetPropertyDescriptor(string name) + { + var memberMap = _classMap.GetMemberMapFromAlias(name); + if (memberMap == null) + return null; + + var type = memberMap.MemberReturnType; + bool isDictionary = false; + if (memberMap is CollectionMemberMap) + type = ((CollectionMemberMap)memberMap).ElementType; + else if (memberMap is DictionaryMemberMap) + { + type = ((DictionaryMemberMap)memberMap).ValueType; + isDictionary = true; + } + + return new PropertyDescriptor { Type = type, IsDictionary = isDictionary }; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Builders/PropertyDescriptor.cs b/source/MongoDB/Serialization/Builders/PropertyDescriptor.cs new file mode 100644 index 00000000..3d09cf18 --- /dev/null +++ b/source/MongoDB/Serialization/Builders/PropertyDescriptor.cs @@ -0,0 +1,24 @@ +using System; + +namespace MongoDB.Serialization.Builders +{ + /// + /// + /// + public class PropertyDescriptor + { + /// + /// Gets or sets the type. + /// + /// The type. + public Type Type { get; set; } + + /// + /// Gets or sets a value indicating whether this instance is dictionary. + /// + /// + /// true if this instance is dictionary; otherwise, false. + /// + public bool IsDictionary { get; set; } + } +} diff --git a/source/MongoDB/Serialization/ClassMapObjectDescriptorAdapter.cs b/source/MongoDB/Serialization/ClassMapObjectDescriptorAdapter.cs new file mode 100644 index 00000000..6cfaf487 --- /dev/null +++ b/source/MongoDB/Serialization/ClassMapObjectDescriptorAdapter.cs @@ -0,0 +1,77 @@ +using System.Collections.Generic; +using MongoDB.Configuration.Mapping.Model; + +namespace MongoDB.Serialization +{ + internal class ClassMapObjectDescriptorAdapter : IObjectDescriptor + { + private readonly IClassMap _classMap; + + /// + /// Initializes a new instance of the class. + /// + /// The class map. + public ClassMapObjectDescriptorAdapter(IClassMap classMap) + { + _classMap = classMap; + } + + /// + /// Generates the id. + /// + /// The instance. + /// + public object GenerateId(object instance){ + return !_classMap.HasId ? null : _classMap.IdMap.Generate(instance); + } + + /// + /// Gets the mongo property names. + /// + /// The instance. + /// + public IEnumerable GetMongoPropertyNames(object instance) + { + if (_classMap.HasId) + yield return _classMap.IdMap.Alias; + + if (_classMap.IsSubClass) + yield return _classMap.DiscriminatorAlias; + + foreach (var memberMap in _classMap.MemberMaps) + yield return memberMap.Alias; + } + + /// + /// Gets the property value. + /// + /// The instance. + /// Name of the mongo. + /// + public object GetPropertyValue(object instance, string mongoName) + { + //not sure if this is necessary... + //if (_classMap.HasDiscriminator && _classMap.DiscriminatorAlias == mongoName) + // return _classMap.Discriminator; + + if (!_classMap.HasId && mongoName == "_id") + return null; + + return _classMap.GetMemberMapFromAlias(mongoName).GetValue(instance); + } + + /// + /// Sets the property value. + /// + /// The instance. + /// Name of the mongo. + /// The value. 
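PolymorphicClassMapBuilder above cannot instantiate anything until it has seen the discriminator, so it parks properties in a dictionary and replays them once the concrete class map is known. A self-contained sketch of that buffering pattern (the "_t" discriminator alias and the dictionary stand-in for the concrete builder are assumptions, not the driver's types):

    using System;
    using System.Collections.Generic;

    class DiscriminatorBufferingSketch
    {
        // Properties that arrive before the discriminator are parked here.
        static readonly Dictionary<string, object> Buffered = new Dictionary<string, object>();
        static Dictionary<string, object> _target;  // stands in for the concrete builder

        static void AddProperty(string name, object value)
        {
            if (_target != null)
                _target[name] = value;              // concrete builder already chosen
            else if (name == "_t")                  // illustrative discriminator alias
            {
                _target = new Dictionary<string, object>();
                foreach (var pair in Buffered)      // replay everything seen so far
                    _target[pair.Key] = pair.Value;
                Buffered.Clear();
            }
            else
                Buffered[name] = value;             // still waiting for the discriminator
        }

        static void Main()
        {
            AddProperty("Name", "x");
            AddProperty("_t", "SubClass");
            AddProperty("Age", 3);
            Console.WriteLine(_target.Count);       // 2: "Name" replayed, "Age" added directly
        }
    }
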
+ public void SetPropertyValue(object instance, string mongoName, object value) + { + if (!_classMap.HasId && mongoName == "_id") //there is nothing for us to set and we'll let the database do the id generation... + return; + + _classMap.GetMemberMapFromAlias(mongoName).SetValue(instance, value); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Descriptors/ArrayDescriptor.cs b/source/MongoDB/Serialization/Descriptors/ArrayDescriptor.cs new file mode 100644 index 00000000..de5c6a84 --- /dev/null +++ b/source/MongoDB/Serialization/Descriptors/ArrayDescriptor.cs @@ -0,0 +1,57 @@ +using System; +using System.Collections.Generic; +using System.Collections; +using MongoDB.Bson; + +namespace MongoDB.Serialization.Descriptors +{ + internal class ArrayDescriptor : IPropertyDescriptor + { + private readonly Type _elementType; + private readonly IEnumerable _enumerable; + + /// + /// Initializes a new instance of the class. + /// + /// The enumerable. + /// Type of the element. + public ArrayDescriptor(IEnumerable enumerable, Type elementType) + { + if (enumerable == null) + throw new ArgumentNullException("enumerable"); + if (elementType == null) + throw new ArgumentNullException("elementType"); + + _elementType = elementType; + _enumerable = enumerable; + } + + /// + /// Gets the properties. + /// + /// + public IEnumerable GetProperties() + { + int i = 0; + foreach (var element in _enumerable) + { + yield return new BsonProperty(i.ToString()) { Value = GetValue(element) }; + i++; + } + } + + /// + /// Gets the value. + /// + /// The value. + /// + private BsonPropertyValue GetValue(object value) + { + var type = _elementType ?? (value == null ? null : value.GetType()); + + return new BsonPropertyValue(type, value, false); + } + + + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Descriptors/BsonPropertyValue.cs b/source/MongoDB/Serialization/Descriptors/BsonPropertyValue.cs new file mode 100644 index 00000000..94d27c88 --- /dev/null +++ b/source/MongoDB/Serialization/Descriptors/BsonPropertyValue.cs @@ -0,0 +1,20 @@ +using System; + +namespace MongoDB.Serialization.Descriptors +{ + internal class BsonPropertyValue + { + public bool IsDictionary { get; private set; } + + public Type Type { get; private set; } + + public object Value { get; private set; } + + public BsonPropertyValue(Type type, object value, bool isDictionary) + { + Type = type; + Value = value; + IsDictionary = isDictionary; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Descriptors/ClassMapPropertyDescriptor.cs b/source/MongoDB/Serialization/Descriptors/ClassMapPropertyDescriptor.cs new file mode 100644 index 00000000..b2280308 --- /dev/null +++ b/source/MongoDB/Serialization/Descriptors/ClassMapPropertyDescriptor.cs @@ -0,0 +1,97 @@ +using System; +using System.Collections.Generic; +using MongoDB.Configuration.Mapping.Model; +using MongoDB.Configuration.Mapping; +using MongoDB.Bson; + +namespace MongoDB.Serialization.Descriptors +{ + internal class ClassMapPropertyDescriptor : ClassMapPropertyDescriptorBase + { + private readonly object _instance; + private readonly IDictionary _extendedProperties; + + /// + /// Initializes a new instance of the class. + /// + /// The mapping store. + /// The class map. + /// The instance. 
+ public ClassMapPropertyDescriptor(IMappingStore mappingStore, IClassMap classMap, object instance) + : base(mappingStore, classMap) + { + if (instance == null) + throw new ArgumentNullException("instance"); + + _instance = instance; + if (ClassMap.HasExtendedProperties) + _extendedProperties = (IDictionary)ClassMap.ExtendedPropertiesMap.GetValue(instance); + } + + /// + /// Gets the property names. + /// + /// + public override IEnumerable GetProperties() + { + if (ClassMap.HasId) + yield return CreateProperty(ClassMap.IdMap.Alias, ClassMap.IdMap.MemberReturnType, ClassMap.GetId(_instance), false); + + if (ShouldAddDiscriminator()) + yield return CreateProperty(ClassMap.DiscriminatorAlias, ClassMap.Discriminator.GetType(), ClassMap.Discriminator, false); + + foreach (var memberMap in ClassMap.MemberMaps) + { + var value = GetValue(memberMap.MemberName); + if (!memberMap.PersistDefaultValue && object.Equals(memberMap.DefaultValue, value)) + continue; + yield return CreateProperty(memberMap.Alias, value); + } + + if (_extendedProperties == null) + yield break; + + foreach(var propertyName in _extendedProperties.Keys) + yield return CreateProperty(propertyName, GetValue(propertyName)); + } + + /// + /// Gets the value. + /// + /// The name. + /// + private BsonPropertyValue GetValue(string name) + { + if (ClassMap.DiscriminatorAlias == name && ShouldAddDiscriminator()) + return new BsonPropertyValue(ClassMap.Discriminator.GetType(), ClassMap.Discriminator, false); + + object value; + + var memberMap = GetAliasFromMemberName(name).MemberMap; + if(memberMap != null) + value = memberMap.GetValue(_instance); + else if (_extendedProperties != null) + value = _extendedProperties[name]; + else + throw new InvalidOperationException("Attempting to get a property that does not exist."); + + var type = typeof(Document); + bool isDictionary = false; + if (memberMap != null) + { + type = memberMap.MemberReturnType; + if (memberMap is CollectionMemberMap) + type = ((CollectionMemberMap)memberMap).ElementType; + else if (memberMap is DictionaryMemberMap) + { + type = ((DictionaryMemberMap)memberMap).ValueType; + isDictionary = true; + } + } + else if (value != null) + type = value.GetType(); + + return new BsonPropertyValue(type, value, isDictionary); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Descriptors/ClassMapPropertyDescriptorBase.cs b/source/MongoDB/Serialization/Descriptors/ClassMapPropertyDescriptorBase.cs new file mode 100644 index 00000000..5aa6123b --- /dev/null +++ b/source/MongoDB/Serialization/Descriptors/ClassMapPropertyDescriptorBase.cs @@ -0,0 +1,260 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Configuration.Mapping.Model; +using MongoDB.Configuration.Mapping; +using System.Text; +using MongoDB.Bson; + +namespace MongoDB.Serialization.Descriptors +{ + /// + /// + /// + internal abstract class ClassMapPropertyDescriptorBase : IPropertyDescriptor + { + private readonly IMappingStore _mappingStore; + private readonly JavascriptMemberNameReplacer _codeReplacer; + /// + /// + /// + protected readonly IClassMap ClassMap; + + /// + /// Initializes a new instance of the class. + /// + /// The mapping store. + /// The class map. 
+ protected ClassMapPropertyDescriptorBase(IMappingStore mappingStore, IClassMap classMap) + { + if (mappingStore == null) + throw new ArgumentNullException("mappingStore"); + if (classMap == null) + throw new ArgumentNullException("classMap"); + + _mappingStore = mappingStore; + ClassMap = classMap; + _codeReplacer = new JavascriptMemberNameReplacer(_mappingStore); + } + + /// + /// Gets the properties. + /// + /// + public abstract IEnumerable GetProperties(); + + /// + /// Creates the property. + /// + /// The alias. + /// Type of the value. + /// The value. + /// if set to true [is dictionary]. + /// + protected BsonProperty CreateProperty(string alias, Type valueType, object value, bool isDictionary) + { + return CreateProperty(alias, new BsonPropertyValue(valueType, value, isDictionary)); + } + + /// + /// Creates the property. + /// + /// The alias. + /// The value. + /// + protected BsonProperty CreateProperty(string alias, BsonPropertyValue value) + { + return new BsonProperty(alias) { Value = value }; + } + + /// + /// Shoulds the persist discriminator. + /// + /// + protected bool ShouldAddDiscriminator() + { + return ClassMap.IsSubClass; + } + + /// + /// Gets the name of the alias from member. + /// + /// The name. + /// + protected MemberMapAndAlias GetAliasFromMemberName(string name) + { + var memberMap = ClassMap.GetMemberMapFromMemberName(name); + if (memberMap != null) + return new MemberMapAndAlias() { MemberMap = memberMap, Alias = memberMap.Alias }; + + if (!name.Contains(".")) + return new MemberMapAndAlias() { Alias = name }; + + var sb = new StringBuilder(); + + var parts = name.Split('.'); + memberMap = ClassMap.GetMemberMapFromMemberName(parts[0]); + if (memberMap == null) + return new MemberMapAndAlias() { Alias = name }; + + sb.Append(memberMap.Alias); + var currentType = memberMap.MemberReturnType; + for (int i = 1; i < parts.Length; i++) + { + if(memberMap != null) + { + var collectionMemberMap = memberMap as CollectionMemberMap; + if (collectionMemberMap != null) + { + currentType = ((CollectionMemberMap)memberMap).ElementType; + if (IsNumeric(parts[i])) //we are an array indexer + { + sb.Append(".").Append(parts[i]); + continue; + } + } + + var classMap = _mappingStore.GetClassMap(currentType); + memberMap = classMap.GetMemberMapFromMemberName(parts[i]); + } + + if (memberMap == null) + sb.Append(".").Append(parts[i]); + else + { + sb.Append(".").Append(memberMap.Alias); + currentType = memberMap.MemberReturnType; + } + } + + return new MemberMapAndAlias() { MemberMap = memberMap, Alias = sb.ToString() }; + } + + protected string TranslateJavascript(string code) + { + return _codeReplacer.Replace(code, ClassMap); + } + + private static bool IsNumeric(IEnumerable str) + { + return str.All(t => char.IsDigit(t)); + } + + protected class MemberMapAndAlias + { + public string Alias { get; set; } + + public PersistentMemberMap MemberMap { get; set; } + } + + /// + /// This is an extremely rudimentary lexer designed solely for efficiency. 
+ /// + private class JavascriptMemberNameReplacer + { + private const char EOF = '\0'; + private readonly IMappingStore _mappingStore; + private IClassMap _classMap; + private string _input; + private int _position; + private StringBuilder _output; + + private char Current + { + get + { + if (_position >= _input.Length) + return EOF; + + return _input[_position]; + } + } + + public JavascriptMemberNameReplacer(IMappingStore mappingStore) + { + _mappingStore = mappingStore; + } + + public string Replace(string input, IClassMap classMap) + { + _classMap = classMap; + _input = input; + _output = new StringBuilder(); + _position = 0; + while (Read()){} + return _output.ToString(); + } + + private bool Read() + { + if (ReadChar(true) == 't' && ReadChar(true) == 'h' && ReadChar(true) == 'i' && ReadChar(true) == 's' && ReadChar(true) == '.') + { + MatchMembers(); + } + return Current != EOF; + } + + private char ReadChar(bool includeInOutput) + { + char c = Current; + _position++; + if(c != EOF && includeInOutput) + _output.Append(c); + return c; + } + + private void MatchMembers() + { + Type currentType = _classMap.ClassType; + Member: + string memberName = MatchMember(); + var classMap = _mappingStore.GetClassMap(currentType); + var memberMap = classMap.GetMemberMapFromMemberName(memberName); + if (memberMap == null) + { + _output.Append(memberName); + return; + } + + _output.Append(memberMap.Alias); + currentType = memberMap.MemberReturnType; + + var c = ReadChar(true); + if (c == '[') + { + MatchIndexer(); + if (memberMap is CollectionMemberMap) + currentType = ((CollectionMemberMap)memberMap).ElementType; + c = ReadChar(true); + } + + if (c == '.') + goto Member; + } + + private string MatchMember() + { + StringBuilder memberName = new StringBuilder(); + char c = Current; + while (Char.IsLetterOrDigit(c) || c == '_' || c == '$') + { + ReadChar(false); + memberName.Append(c); + c = Current; + } + + return memberName.ToString(); + } + + private void MatchIndexer() + { + var c = ReadChar(true); + while (c != ']' && c != EOF) + { + c = ReadChar(true); + } + } + + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Descriptors/DictionaryPropertyDescriptor.cs b/source/MongoDB/Serialization/Descriptors/DictionaryPropertyDescriptor.cs new file mode 100644 index 00000000..482969d9 --- /dev/null +++ b/source/MongoDB/Serialization/Descriptors/DictionaryPropertyDescriptor.cs @@ -0,0 +1,36 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Bson; + +namespace MongoDB.Serialization.Descriptors +{ + /// + /// + /// + public class DictionaryPropertyDescriptor : IPropertyDescriptor + { + private readonly Document _document; + private readonly Type _valueType; + + /// + /// Initializes a new instance of the class. + /// + /// The document. + /// Type of the value. + public DictionaryPropertyDescriptor(Document document, Type valueType) + { + _document = document; + _valueType = valueType; + } + + /// + /// Gets the properties. 
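The JavascriptMemberNameReplacer above scans server-side JavaScript for "this.&lt;member&gt;" chains and swaps each member name for its mapped alias, so $where and map/reduce code can be written against the C# property names. With hypothetical aliases (FirstName stored as "fn", Addresses as "addrs", City as "city", assumed purely for illustration):

    // input:   "this.FirstName == 'Bob' && this.Addresses[0].City == 'NYC'"
    // output:  "this.fn == 'Bob' && this.addrs[0].city == 'NYC'"
    // Indexer contents pass through untouched; unmapped member names are left as written.
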
+ /// + /// + public IEnumerable GetProperties() + { + return _document.Select(e => new BsonProperty(e.Key) { Value = new BsonPropertyValue(_valueType, e.Value, false) }); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Descriptors/DocumentClassMapPropertyDescriptor.cs b/source/MongoDB/Serialization/Descriptors/DocumentClassMapPropertyDescriptor.cs new file mode 100644 index 00000000..8a03a17b --- /dev/null +++ b/source/MongoDB/Serialization/Descriptors/DocumentClassMapPropertyDescriptor.cs @@ -0,0 +1,102 @@ +using System; +using System.Collections.Generic; +using MongoDB.Configuration.Mapping.Model; +using MongoDB.Configuration.Mapping; +using MongoDB.Bson; + +namespace MongoDB.Serialization.Descriptors +{ + internal class DocumentClassMapPropertyDescriptor : ClassMapPropertyDescriptorBase + { + private readonly Document _document; + + /// + /// Initializes a new instance of the class. + /// + /// The mapping store. + /// The class map. + /// The document. + public DocumentClassMapPropertyDescriptor(IMappingStore mappingStore, IClassMap classMap, Document document) + : base(mappingStore, classMap) + { + if (document == null) + throw new ArgumentNullException("document"); + + _document = document; + } + + /// + /// Gets the property names. + /// + /// + public override IEnumerable GetProperties() + { + if(ShouldAddDiscriminator()) + { + if (_document.ContainsKey("count")) //this is a special case + { + var queryDoc = _document["query"] as Document; + if (queryDoc == null) + { + //TODO: implement someway of shoving this value into the doc... + throw new NotSupportedException("Count queries on subclasses using anonymous types is not supported."); + } + else if (!queryDoc.ContainsKey(ClassMap.DiscriminatorAlias)) + queryDoc.Append(ClassMap.DiscriminatorAlias, ClassMap.Discriminator); + } + else + yield return CreateProperty(ClassMap.DiscriminatorAlias, ClassMap.Discriminator.GetType(), ClassMap.Discriminator, false); + } + + foreach (string key in _document.Keys) + { + var alias = GetAliasFromMemberName(key); + var valueAndType = GetValue(key); + if (alias.MemberMap != null && !alias.MemberMap.PersistDefaultValue && object.Equals(alias.MemberMap.DefaultValue, valueAndType.Value)) + continue; + yield return CreateProperty(alias.Alias, valueAndType); + } + } + + /// + /// Gets the value. + /// + /// The name. + /// + private BsonPropertyValue GetValue(string name) + { + if (ClassMap.DiscriminatorAlias == name && ShouldAddDiscriminator()) + return new BsonPropertyValue(ClassMap.Discriminator.GetType(), ClassMap.Discriminator, false); + + var value = _document[name]; + if (value != null && typeof(Code).IsAssignableFrom(value.GetType())) + { + Code code = (Code)value; + code.Value = TranslateJavascript(code.Value); + return new BsonPropertyValue(typeof(Code), code, false); + } + + var memberMap = GetAliasFromMemberName(name).MemberMap; + var type = typeof(Document); + bool isDictionary = false; + + if (memberMap != null) + { + type = memberMap.MemberReturnType; + if (memberMap is CollectionMemberMap) + type = ((CollectionMemberMap)memberMap).ElementType; + else if (memberMap is DictionaryMemberMap) + { + type = ((DictionaryMemberMap)memberMap).ValueType; + isDictionary = true; + } + } + else if (name.StartsWith("$") || name == "query" || name == "orderby") //we are a modifier, a special case of querying, or order fields + type = ClassMap.ClassType; //we'll pass this along so that the fields get replaced correctly... 
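For subclasses, the count special case above has to smuggle the discriminator into the command's inner query document rather than onto the top level. Illustrative shape (the collection name, field values and "_t" discriminator alias are invented for the example):

    // count command for a subclass, before the descriptor runs:
    //   { "count" : "people", "query" : { "Age" : 21 } }
    // after:
    //   { "count" : "people", "query" : { "Age" : 21, "_t" : "Employee" } }
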
+ else if (value != null) + type = value.GetType(); + + return new BsonPropertyValue(type, value, isDictionary); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Descriptors/DocumentPropertyDescriptor.cs b/source/MongoDB/Serialization/Descriptors/DocumentPropertyDescriptor.cs new file mode 100644 index 00000000..b17dacad --- /dev/null +++ b/source/MongoDB/Serialization/Descriptors/DocumentPropertyDescriptor.cs @@ -0,0 +1,46 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Bson; + +namespace MongoDB.Serialization.Descriptors +{ + internal class DocumentPropertyDescriptor : IPropertyDescriptor + { + private readonly Document _document; + + /// + /// Initializes a new instance of the class. + /// + /// The document. + public DocumentPropertyDescriptor(Document document) + { + if (document == null) + throw new ArgumentNullException("document"); + _document = document; + } + + /// + /// Gets the properties. + /// + /// + public IEnumerable GetProperties() + { + return _document.Select(pair => new BsonProperty(pair.Key) + { + Value = GetValue(pair.Value) + }); + } + + /// + /// Gets the value. + /// + /// The value. + /// + private BsonPropertyValue GetValue(object value) + { + var valueType = value == null ? typeof(Document) : value.GetType(); + return new BsonPropertyValue(valueType, value, false); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Descriptors/ExampleClassMapPropertyDescriptor.cs b/source/MongoDB/Serialization/Descriptors/ExampleClassMapPropertyDescriptor.cs new file mode 100644 index 00000000..df6c1bd4 --- /dev/null +++ b/source/MongoDB/Serialization/Descriptors/ExampleClassMapPropertyDescriptor.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Reflection; +using MongoDB.Configuration.Mapping.Model; +using MongoDB.Configuration.Mapping; +using MongoDB.Bson; + +namespace MongoDB.Serialization.Descriptors +{ + internal class ExampleClassMapPropertyDescriptor : ClassMapPropertyDescriptorBase + { + private readonly object _example; + private readonly Type _exampleType; + + /// + /// Initializes a new instance of the class. + /// + /// The mapping store. + /// The class map. + /// The example. + public ExampleClassMapPropertyDescriptor(IMappingStore mappingStore, IClassMap classMap, object example) + : base(mappingStore, classMap) + { + if (example == null) + throw new ArgumentNullException("example"); + + _example = example; + _exampleType = _example.GetType(); + } + + /// + /// Gets the property names. + /// + /// + public override IEnumerable GetProperties() + { + if (ShouldAddDiscriminator()) + yield return CreateProperty(ClassMap.DiscriminatorAlias, ClassMap.Discriminator.GetType(), ClassMap.Discriminator, false); + + foreach (PropertyInfo propertyInfo in _exampleType.GetProperties()) + { + var alias = GetAliasFromMemberName(propertyInfo.Name); + var value = GetValue(propertyInfo); + if (alias.MemberMap != null && !alias.MemberMap.PersistDefaultValue && object.Equals(alias.MemberMap.DefaultValue, value)) + continue; + + yield return CreateProperty(alias.Alias, value); + } + } + + /// + /// Gets the value. + /// + /// The property info. 
+ /// + private BsonPropertyValue GetValue(PropertyInfo propertyInfo) + { + Type type = null; + var value = propertyInfo.GetValue(_example, null); + if (value != null && typeof(Code).IsAssignableFrom(value.GetType())) + { + Code code = (Code)value; + code.Value = TranslateJavascript(code.Value); + return new BsonPropertyValue(typeof(Code), code, false); + } + + bool isDictionary = false; + var memberMap = GetAliasFromMemberName(propertyInfo.Name).MemberMap; + if (memberMap != null) + { + if (memberMap is CollectionMemberMap) + type = ((CollectionMemberMap)memberMap).ElementType; + else if (memberMap is DictionaryMemberMap) + { + type = ((DictionaryMemberMap)memberMap).ValueType; + isDictionary = true; + } + + if (type == null || type == typeof(object)) + type = memberMap.MemberReturnType; + } + else + type = propertyInfo.PropertyType; + + return new BsonPropertyValue(type, value, isDictionary); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/Descriptors/IPropertyDescriptor.cs b/source/MongoDB/Serialization/Descriptors/IPropertyDescriptor.cs new file mode 100644 index 00000000..4c7ff013 --- /dev/null +++ b/source/MongoDB/Serialization/Descriptors/IPropertyDescriptor.cs @@ -0,0 +1,10 @@ +using System.Collections.Generic; +using MongoDB.Bson; + +namespace MongoDB.Serialization.Descriptors +{ + internal interface IPropertyDescriptor + { + IEnumerable GetProperties(); + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/DocumentObjectDescriptorAdapter.cs b/source/MongoDB/Serialization/DocumentObjectDescriptorAdapter.cs new file mode 100644 index 00000000..9f50cdc2 --- /dev/null +++ b/source/MongoDB/Serialization/DocumentObjectDescriptorAdapter.cs @@ -0,0 +1,52 @@ +using System.Collections.Generic; + +namespace MongoDB.Serialization +{ + /// + /// + /// + public class DocumentObjectDescriptorAdapter : IObjectDescriptor + { + /// + /// Generates the id. + /// + /// The instance. + /// + public object GenerateId(object instance) + { + return Oid.NewOid(); + } + + /// + /// Gets the property value. + /// + /// The instance. + /// Name of the mongo. + /// + public object GetPropertyValue(object instance, string mongoName) + { + return ((Document)instance)[mongoName]; + } + + /// + /// Sets the property value. + /// + /// The instance. + /// Name of the mongo. + /// The value. + public void SetPropertyValue(object instance, string mongoName, object value) + { + ((Document)instance)[mongoName] = value; + } + + /// + /// Gets the mongo property names. + /// + /// The instance. + /// + public IEnumerable GetMongoPropertyNames(object instance) + { + return ((Document)instance).Keys; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/IObjectDescriptor.cs b/source/MongoDB/Serialization/IObjectDescriptor.cs new file mode 100644 index 00000000..b857b871 --- /dev/null +++ b/source/MongoDB/Serialization/IObjectDescriptor.cs @@ -0,0 +1,40 @@ +using System.Collections.Generic; + +namespace MongoDB.Serialization +{ + /// + /// + /// + public interface IObjectDescriptor + { + /// + /// Generates the id. + /// + /// The instance. + /// + object GenerateId(object instance); + + /// + /// Gets the property value. + /// + /// The instance. + /// Name of the mongo. + /// + object GetPropertyValue(object instance, string mongoName); + + /// + /// Sets the property value. + /// + /// The instance. + /// Name of the mongo. + /// The value. 
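ExampleClassMapPropertyDescriptor above walks the public properties of the example object (typically an anonymous type) and emits one BSON property per CLR property. The same reflection loop, stripped of the alias lookup, as a standalone sketch:

    using System;

    class ExampleQuerySketch
    {
        static void Main()
        {
            var example = new { Name = "Bob", Age = 21 };

            // One property per public member of the anonymous type.
            foreach (var property in example.GetType().GetProperties())
                Console.WriteLine("{0} = {1}", property.Name, property.GetValue(example, null));
        }
    }
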
+ void SetPropertyValue(object instance, string mongoName, object value); + + /// + /// Gets the mongo property names. + /// + /// The instance. + /// + IEnumerable GetMongoPropertyNames(object instance); + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/ISerializationFactory.cs b/source/MongoDB/Serialization/ISerializationFactory.cs new file mode 100644 index 00000000..008eaa00 --- /dev/null +++ b/source/MongoDB/Serialization/ISerializationFactory.cs @@ -0,0 +1,39 @@ +using System; +using MongoDB.Bson; + +namespace MongoDB.Serialization +{ + /// + /// + /// + public interface ISerializationFactory + { + /// + /// Gets the bson writer settings. + /// + /// Type of the root. + /// + BsonWriterSettings GetBsonWriterSettings(Type rootType); + + /// + /// Gets the name of the collection given the rootType. + /// + /// Type of the root. + /// + string GetCollectionName(Type rootType); + + /// + /// Gets the object descriptor. + /// + /// The type. + /// + IObjectDescriptor GetObjectDescriptor(Type type); + + /// + /// Gets the bson reader settings. + /// + /// Type of the root. + /// + BsonReaderSettings GetBsonReaderSettings(Type rootType); + } +} \ No newline at end of file diff --git a/source/MongoDB/Serialization/SerializationFactory.cs b/source/MongoDB/Serialization/SerializationFactory.cs new file mode 100644 index 00000000..cb4a7fde --- /dev/null +++ b/source/MongoDB/Serialization/SerializationFactory.cs @@ -0,0 +1,105 @@ +using System; +using MongoDB.Bson; +using MongoDB.Configuration; + +namespace MongoDB.Serialization +{ + /// + /// + /// + public class SerializationFactory : ISerializationFactory + { + private readonly MongoConfiguration _configuration; + + /// + /// Initializes a new instance of the class. + /// + public SerializationFactory() + : this(MongoConfiguration.Default) + { } + + /// + /// Initializes a new instance of the class. + /// + /// The mongo configuration. + public SerializationFactory(MongoConfiguration configuration) + { + if(configuration == null) + throw new ArgumentNullException("configuration"); + + _configuration = configuration; + } + + /// + /// Gets the builder. + /// + /// Type of the root. + /// + public IBsonObjectBuilder GetBsonBuilder(Type rootType) + { + return new BsonClassMapBuilder(_configuration.MappingStore, rootType); + } + + /// + /// Gets the descriptor. + /// + /// Type of the root. + /// + public IBsonObjectDescriptor GetBsonDescriptor(Type rootType) + { + return new BsonClassMapDescriptor(_configuration.MappingStore, rootType); + } + + /// + /// Gets the bson reader settings. + /// + /// Type of the root. + /// + public BsonReaderSettings GetBsonReaderSettings(Type rootType) + { + return new BsonReaderSettings(GetBsonBuilder(rootType)) + { + ReadLocalTime = _configuration.ReadLocalTime + }; + } + + /// + /// Gets the bson writer settings. + /// + /// Type of the root. + /// + public BsonWriterSettings GetBsonWriterSettings(Type rootType) + { + return new BsonWriterSettings(GetBsonDescriptor(rootType)); + } + + /// + /// Gets the name of the collection given the rootType. + /// + /// Type of the root. + /// + public string GetCollectionName(Type rootType) + { + if (rootType == null) + throw new ArgumentNullException("rootType"); + + if (typeof(Document).IsAssignableFrom(rootType)) + throw new InvalidOperationException("Documents cannot have a default collection name."); + + return _configuration.MappingStore.GetClassMap(rootType).CollectionName; + } + + /// + /// Gets the object descriptor. 
+ /// + /// The type. + /// + public IObjectDescriptor GetObjectDescriptor(Type type) + { + if (typeof(Document).IsAssignableFrom(type)) + return new DocumentObjectDescriptorAdapter(); + + return new ClassMapObjectDescriptorAdapter(_configuration.MappingStore.GetClassMap(type)); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/UpdateFlags.cs b/source/MongoDB/UpdateFlags.cs new file mode 100644 index 00000000..1d21ba1d --- /dev/null +++ b/source/MongoDB/UpdateFlags.cs @@ -0,0 +1,23 @@ +namespace MongoDB +{ + /// + /// Update flags. + /// + /// + /// Bits 2-31 are Reserved and must be set to 0. + /// + public enum UpdateFlags { + /// + /// Default none. + /// + None = 0, + /// + /// If set, the database will insert the supplied object into the collection if no matching document is found. + /// + Upsert = 1, + /// + /// If set, the database will update all matching objects in the collection. Otherwise only updates first matching doc. + /// + MultiUpdate = 2 + } +} \ No newline at end of file diff --git a/MongoDBDriver/Util/ErrorTranslator.cs b/source/MongoDB/Util/ErrorTranslator.cs similarity index 92% rename from MongoDBDriver/Util/ErrorTranslator.cs rename to source/MongoDB/Util/ErrorTranslator.cs index 5c59797b..6aabef31 100644 --- a/MongoDBDriver/Util/ErrorTranslator.cs +++ b/source/MongoDB/Util/ErrorTranslator.cs @@ -1,6 +1,6 @@ using System; -namespace MongoDB.Driver +namespace MongoDB.Util { /// /// Translates an error returned from Mongo into the proper exception. @@ -30,7 +30,7 @@ public static MongoException Translate(Document error){ /// true if the specified document is error; otherwise, false. /// public static bool IsError(Document document){ - if(document.Contains("err") && document["err"] != DBNull.Value) + if(document.ContainsKey("err") && document["err"] != null) return true; return false; } @@ -69,6 +69,7 @@ private static MongoException BuildException(string errorNumber, string message, case "12010": case "12011": case "12012": + goto default; default:{ return new MongoOperationException(message, error); } diff --git a/source/MongoDB/Util/Inflector.cs b/source/MongoDB/Util/Inflector.cs new file mode 100644 index 00000000..58c08da9 --- /dev/null +++ b/source/MongoDB/Util/Inflector.cs @@ -0,0 +1,340 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Text.RegularExpressions; + +namespace MongoDB.Util +{ + /// + /// This was ripped out of SubSonic I believe. + /// + internal static class Inflector + { + private static readonly List Plurals = new List(); + private static readonly List Singulars = new List(); + private static readonly List Uncountables = new List(); + + /// + /// Initializes the class. 
+ /// + static Inflector() + { + AddPluralRule("$", "s"); + AddPluralRule("s$", "s"); + AddPluralRule("(ax|test)is$", "$1es"); + AddPluralRule("(octop|vir)us$", "$1i"); + AddPluralRule("(alias|status)$", "$1es"); + AddPluralRule("(bu)s$", "$1ses"); + AddPluralRule("(buffal|tomat)o$", "$1oes"); + AddPluralRule("([ti])um$", "$1a"); + AddPluralRule("sis$", "ses"); + AddPluralRule("(?:([^f])fe|([lr])f)$", "$1$2ves"); + AddPluralRule("(hive)$", "$1s"); + AddPluralRule("([^aeiouy]|qu)y$", "$1ies"); + AddPluralRule("(x|ch|ss|sh)$", "$1es"); + AddPluralRule("(matr|vert|ind)ix|ex$", "$1ices"); + AddPluralRule("([m|l])ouse$", "$1ice"); + AddPluralRule("^(ox)$", "$1en"); + AddPluralRule("(quiz)$", "$1zes"); + + AddSingularRule("s$", String.Empty); + AddSingularRule("ss$", "ss"); + AddSingularRule("(n)ews$", "$1ews"); + AddSingularRule("([ti])a$", "$1um"); + AddSingularRule("((a)naly|(b)a|(d)iagno|(p)arenthe|(p)rogno|(s)ynop|(t)he)ses$", "$1$2sis"); + AddSingularRule("(^analy)ses$", "$1sis"); + AddSingularRule("([^f])ves$", "$1fe"); + AddSingularRule("(hive)s$", "$1"); + AddSingularRule("(tive)s$", "$1"); + AddSingularRule("([lr])ves$", "$1f"); + AddSingularRule("([^aeiouy]|qu)ies$", "$1y"); + AddSingularRule("(s)eries$", "$1eries"); + AddSingularRule("(m)ovies$", "$1ovie"); + AddSingularRule("(x|ch|ss|sh)es$", "$1"); + AddSingularRule("([m|l])ice$", "$1ouse"); + AddSingularRule("(bus)es$", "$1"); + AddSingularRule("(o)es$", "$1"); + AddSingularRule("(shoe)s$", "$1"); + AddSingularRule("(cris|ax|test)es$", "$1is"); + AddSingularRule("(octop|vir)i$", "$1us"); + AddSingularRule("(alias|status)$", "$1"); + AddSingularRule("(alias|status)es$", "$1"); + AddSingularRule("^(ox)en", "$1"); + AddSingularRule("(vert|ind)ices$", "$1ex"); + AddSingularRule("(matr)ices$", "$1ix"); + AddSingularRule("(quiz)zes$", "$1"); + + AddIrregularRule("person", "people"); + AddIrregularRule("man", "men"); + AddIrregularRule("child", "children"); + AddIrregularRule("sex", "sexes"); + AddIrregularRule("tax", "taxes"); + AddIrregularRule("move", "moves"); + + AddUnknownCountRule("equipment"); + AddUnknownCountRule("information"); + AddUnknownCountRule("rice"); + AddUnknownCountRule("money"); + AddUnknownCountRule("species"); + AddUnknownCountRule("series"); + AddUnknownCountRule("fish"); + AddUnknownCountRule("sheep"); + } + + /// + /// Adds the irregular rule. + /// + /// The singular. + /// The plural. + private static void AddIrregularRule(string singular, string plural) + { + AddPluralRule(String.Concat("(", singular[0], ")", singular.Substring(1), "$"), + String.Concat("$1", plural.Substring(1))); + AddSingularRule(String.Concat("(", plural[0], ")", plural.Substring(1), "$"), + String.Concat("$1", singular.Substring(1))); + } + + /// + /// Adds the unknown count rule. + /// + /// The word. + private static void AddUnknownCountRule(string word) + { + Uncountables.Add(word.ToLower()); + } + + /// + /// Adds the plural rule. + /// + /// The rule. + /// The replacement. + private static void AddPluralRule(string rule, string replacement) + { + Plurals.Add(new InflectorRule(rule, replacement)); + } + + /// + /// Adds the singular rule. + /// + /// The rule. + /// The replacement. + private static void AddSingularRule(string rule, string replacement) + { + Singulars.Add(new InflectorRule(rule, replacement)); + } + + /// + /// Makes the plural. + /// + /// The word. + /// + public static string MakePlural(string word) + { + return ApplyRules(Plurals, word); + } + + /// + /// Makes the singular. + /// + /// The word. 
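The rule tables above drive pluralization and singularization (used, among other things, for deriving default collection names). Two of the rule pairs from the tables, applied directly, to show the mechanism; the real Inflector walks the rule list from the end and stops at the first rule that matches:

    using System;
    using System.Text.RegularExpressions;

    class InflectorRuleSketch
    {
        static void Main()
        {
            var pluralRule = new Regex("(x|ch|ss|sh)$", RegexOptions.IgnoreCase);
            Console.WriteLine(pluralRule.Replace("box", "$1es"));     // boxes

            var singularRule = new Regex("(x|ch|ss|sh)es$", RegexOptions.IgnoreCase);
            Console.WriteLine(singularRule.Replace("boxes", "$1"));   // box
        }
    }
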
+ /// + public static string MakeSingular(string word) + { + return ApplyRules(Singulars, word); + } + + /// + /// Applies the rules. + /// + /// The rules. + /// The word. + /// + private static string ApplyRules(IList rules, string word) + { + var result = word; + if(!Uncountables.Contains(word.ToLower())) + for(var i = rules.Count - 1; i >= 0; i--) + { + var currentPass = rules[i].Apply(word); + if(currentPass == null) + continue; + result = currentPass; + break; + } + return result; + } + + /// + /// Converts the string to title case. + /// + /// The word. + /// + public static string ToTitleCase(string word) + { + return Regex.Replace(ToHumanCase(AddUnderscores(word)), + @"\b([a-z])", + match => match.Captures[0].Value.ToUpper()); + } + + /// + /// Converts the string to human case. + /// + /// The lowercase and underscored word. + /// + public static string ToHumanCase(string lowercaseAndUnderscoredWord) + { + return MakeInitialCaps(Regex.Replace(lowercaseAndUnderscoredWord, @"_", " ")); + } + + /// + /// Convert string to proper case + /// + /// The source string. + /// + public static string ToProper(string sourceString) + { + var propertyName = ToPascalCase(sourceString); + return propertyName; + } + + /// + /// Converts the string to pascal case. + /// + /// The lowercase and underscored word. + /// + public static string ToPascalCase(string lowercaseAndUnderscoredWord) + { + return ToPascalCase(lowercaseAndUnderscoredWord, true); + } + + /// + /// Converts text to pascal case... + /// + /// The text. + /// if set to true [remove underscores]. + /// + public static string ToPascalCase(string text, bool removeUnderscores) + { + if(String.IsNullOrEmpty(text)) + return text; + + text = text.Replace("_", " "); + var joinString = removeUnderscores ? String.Empty : "_"; + var words = text.Split(' '); + if(words.Length > 1) // || char.IsUpper(words[0][0])) + { + for(var i = 0; i < words.Length; i++) + if(words[i].Length > 0) + { + var word = words[i]; + var restOfWord = word.Substring(1); + + restOfWord = restOfWord.ToLower(CultureInfo.CurrentUICulture); + + var firstChar = char.ToUpper(word[0], CultureInfo.CurrentUICulture); + words[i] = String.Concat(firstChar, restOfWord); + } + return String.Join(joinString, words); + } + return String.Concat(words[0].Substring(0, 1).ToUpper(CultureInfo.CurrentUICulture), words[0].Substring(1)); + } + + /// + /// Converts the string to camel case. + /// + /// The lowercase and underscored word. + /// + public static string ToCamelCase(string lowercaseAndUnderscoredWord) + { + return MakeInitialLowerCase(ToPascalCase(lowercaseAndUnderscoredWord)); + } + + /// + /// Adds the underscores. + /// + /// The pascal cased word. + /// + public static string AddUnderscores(string pascalCasedWord) + { + return + Regex.Replace( + Regex.Replace(Regex.Replace(pascalCasedWord, @"([A-Z]+)([A-Z][a-z])", "$1_$2"), + @"([a-z\d])([A-Z])", + "$1_$2"), + @"[-\s]", + "_").ToLower(); + } + + /// + /// Converts the underscores to dashes. + /// + /// The underscored word. + /// + public static string ConvertUnderscoresToDashes(string underscoredWord) + { + return underscoredWord.Replace('_', '-'); + } + + /// + /// Makes the initial caps. + /// + /// The word. + /// + public static string MakeInitialCaps(string word) + { + return String.Concat(word.Substring(0, 1).ToUpper(), word.Substring(1).ToLower()); + } + + /// + /// Makes the initial lower case. + /// + /// The word. 
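The case helpers above are chains of regex passes; AddUnderscores, for instance, splits PascalCase boundaries before lower-casing. The same three passes as a standalone sketch (input word chosen for illustration):

    using System;
    using System.Text.RegularExpressions;

    class UnderscoreSketch
    {
        static void Main()
        {
            const string word = "MongoDBDriver";

            // Pass 1 splits runs of capitals, pass 2 splits lower-to-upper boundaries,
            // pass 3 normalizes dashes and whitespace to underscores.
            var result = Regex.Replace(
                Regex.Replace(
                    Regex.Replace(word, @"([A-Z]+)([A-Z][a-z])", "$1_$2"),
                    @"([a-z\d])([A-Z])", "$1_$2"),
                @"[-\s]", "_").ToLower();

            Console.WriteLine(result);  // mongo_db_driver
        }
    }
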
+ /// + public static string MakeInitialLowerCase(string word) + { + return String.Concat(word.Substring(0, 1).ToLower(), word.Substring(1)); + } + + /// + /// Summary for the InflectorRule class + /// + private class InflectorRule + { + /// + /// + private readonly Regex regex; + + /// + /// + private readonly string replacement; + + /// + /// Initializes a new instance of the class. + /// + /// The regex pattern. + /// The replacement text. + public InflectorRule(string regexPattern, string replacementText) + { + regex = new Regex(regexPattern, RegexOptions.IgnoreCase); + replacement = replacementText; + } + + /// + /// Applies the specified word. + /// + /// The word. + /// + public string Apply(string word) + { + if(!regex.IsMatch(word)) + return null; + + var replace = regex.Replace(word, replacement); + if(word == word.ToUpper()) + replace = replace.ToUpper(); + + return replace; + } + } + } +} \ No newline at end of file diff --git a/MongoDBDriver/Util/JsonFormatter.cs b/source/MongoDB/Util/JsonFormatter.cs similarity index 53% rename from MongoDBDriver/Util/JsonFormatter.cs rename to source/MongoDB/Util/JsonFormatter.cs index f2be311d..20a1ee1e 100644 --- a/MongoDBDriver/Util/JsonFormatter.cs +++ b/source/MongoDB/Util/JsonFormatter.cs @@ -1,136 +1,158 @@ using System; using System.Collections; using System.Globalization; -using System.Text; - - -namespace MongoDB.Driver -{ - /// - /// Lightweight routines to handle basic json serializing. - /// - public class JsonFormatter +using System.Text; + +namespace MongoDB.Util +{ + /// + /// Lightweight routines to handle basic json serializing. + /// + internal class JsonFormatter { /// - /// Serializes the specified doc. + /// Serializes the specified doc. /// - /// The doc. + /// The doc. /// - public static string Serialize(Document doc){ - var json = new StringBuilder(); - json.Append("{ "); - var first = true; - foreach (String key in doc.Keys) { - if (first) { - first = false; - } else { - json.Append(", "); - } - json.AppendFormat(@"""{0}"": ", key); - SerializeType(doc[key], json); - } - json.Append(" }"); - return json.ToString(); + public static string Serialize(Document doc) + { + var json = new StringBuilder(); + json.Append("{ "); + var first = true; + foreach(var key in doc.Keys) + { + if(first) + first = false; + else + json.Append(", "); + json.AppendFormat(@"""{0}"": ", key); + SerializeType(doc[key], json); + } + json.Append(" }"); + return json.ToString(); } /// - /// Serializes the type. + /// Serializes for server side. /// /// The value. - /// The json. - private static void SerializeType(object value, StringBuilder json) { - if (value == null) { - json.Append("null"); - return; - } - var t = value.GetType(); - if (value is bool) { + /// + public static string SerializeForServerSide(object value) + { + var sb = new StringBuilder(); + if (value is DateTime) + { + DateTime d = (DateTime)value; + sb.AppendFormat("new Date({0},{1},{2},{3},{4},{5},{6})", d.Year, d.Month - 1, d.Day, d.Hour, d.Minute, d.Second, d.Millisecond); + } + else + SerializeType(value, sb); + return sb.ToString(); + } + + /// + /// Serializes the type. + /// + /// The value. + /// The json. + private static void SerializeType(object value, StringBuilder json) + { + if(value == null) + { + json.Append("null"); + return; + } + if(value is bool) json.Append(((bool)value) ? 
"true" : "false"); - } else if(value is Document || - value is Oid || - value is Binary || - value is DBRef || - value is MongoMinKey || - value is MongoMaxKey || - value is Code || - value is CodeWScope) { - json.Append(value); - } else if(value is int || - value is long || - value is float || - value is double ) { - // Format numbers allways culture invariant + else if(value is Oid) + json.Append(((Oid)value).ToString("J")); + else if(value is Document || + value is Binary || + value is DBRef || + value is MongoMinKey || + value is MongoMaxKey || + value is Code || + value is CodeWScope) + json.Append(value); + else if(value is int || + value is long || + value is float || + value is double) // Format numbers allways culture invariant // Example: Without this in Germany 10.3 is outputed as 10,3 json.Append(((IFormattable)value).ToString("G", CultureInfo.InvariantCulture)); - } else if(value is string){ - json.AppendFormat(@"""{0}""", Escape((string)value)); - } else if (value is DateTime) { - json.AppendFormat(@"""{0}""", ((DateTime)value).ToUniversalTime().ToString("o")); - } else if (value is Guid) { - json.Append(String.Format(@"{{ ""$uid"": ""{0}"" }}",value)); - } else if (value is IEnumerable) { + else if(value is string) + json.AppendFormat(@"""{0}""", Escape((string)value)); + else if(value is DateTime) + json.AppendFormat(@"""{0}""", ((DateTime)value).ToUniversalTime().ToString("o")); + else if(value is Guid) + json.Append(String.Format(@"{{ ""$uid"": ""{0}"" }}", value)); + else if(value is IEnumerable) + { json.Append("[ "); var first = true; - foreach (var v in (IEnumerable)value) { - if (first) { + foreach(var v in (IEnumerable)value) + { + if(first) first = false; - } else { + else json.Append(", "); - } SerializeType(v, json); } - json.Append(" ]"); - } else { - json.AppendFormat(@"""{0}""", Escape(value.ToString())); - } - return; - } - - /// - /// Escapes any characters that are special to javascript. - /// - public static string Escape(string text){ - var builder = new StringBuilder(); - foreach(char c in text){ - switch(c){ - case '\b': - builder.Append(@"\b"); - break; - case '\f': - builder.Append(@"\f"); - break; - case '\n': - builder.Append(@"\n"); - break; - case '\r': - builder.Append(@"\r"); - break; - case '\t': - builder.Append(@"\t"); - break; - case '\v': - builder.Append(@"\v"); - break; - case '\'': - builder.Append(@"\'"); - break; - case '"': - builder.Append("\\\""); - break; - case '\\': - builder.Append(@"\\"); - break; - default: - if(c <= '\u001f'){ - builder.Append("\\u"); - builder.Append(((int)c).ToString("x4")); - }else{ - builder.Append(c); - } - break; - } - } - return builder.ToString(); - } - } -} + json.Append(" ]"); + } + else + json.AppendFormat(@"""{0}""", Escape(value.ToString())); + return; + } + + /// + /// Escapes any characters that are special to javascript. 
+ /// + public static string Escape(string text) + { + var builder = new StringBuilder(); + foreach(var c in text) + switch(c) + { + case '\b': + builder.Append(@"\b"); + break; + case '\f': + builder.Append(@"\f"); + break; + case '\n': + builder.Append(@"\n"); + break; + case '\r': + builder.Append(@"\r"); + break; + case '\t': + builder.Append(@"\t"); + break; + case '\v': + builder.Append(@"\v"); + break; + case '\'': + builder.Append(@"\'"); + break; + case '"': + builder.Append("\\\""); + break; + case '\\': + builder.Append(@"\\"); + break; + default: + if(c <= '\u001f') + { + builder.Append("\\u"); + builder.Append(((int)c).ToString("x4")); + } + else + builder.Append(c); + break; + } + return builder.ToString(); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Util/MongoHash.cs b/source/MongoDB/Util/MongoHash.cs new file mode 100644 index 00000000..f4458a46 --- /dev/null +++ b/source/MongoDB/Util/MongoHash.cs @@ -0,0 +1,24 @@ +using System; +using System.Security.Cryptography; +using System.Text; + +namespace MongoDB.Util +{ + /// + /// + /// + internal static class MongoHash + { + /// + /// Generate a hash for the specified text. + /// + /// The text. + /// + public static string Generate(string text) + { + var md5 = MD5.Create(); + var hash = md5.ComputeHash(Encoding.Default.GetBytes(text)); + return BitConverter.ToString(hash).Replace("-", "").ToLower(); + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Util/OidGenerator.cs b/source/MongoDB/Util/OidGenerator.cs new file mode 100644 index 00000000..7892ad30 --- /dev/null +++ b/source/MongoDB/Util/OidGenerator.cs @@ -0,0 +1,112 @@ +using System; +using System.Diagnostics; +using System.Net; +using System.Security.Cryptography; +using System.Text; +using MongoDB.Bson; + +namespace MongoDB.Util +{ + /// + /// + internal class OidGenerator + { + private readonly object _inclock = new object(); + private int _inc; + private byte[] _machineHash; + private byte[] _procId; + + /// + /// Initializes a new instance of the class. + /// + public OidGenerator() + { + GenerateConstants(); + } + + /// + /// Generates this instance. + /// + /// + public Oid Generate() + { + //FIXME Endian issues with this code. + //.Net runs in native endian mode which is usually little endian. + //Big endian machines don't need the reversing (Linux+PPC, XNA on XBox) + var oid = new byte[12]; + var copyidx = 0; + + var time = BitConverter.GetBytes(GenerateTime()); + Array.Reverse(time); + Array.Copy(time, 0, oid, copyidx, 4); + copyidx += 4; + + Array.Copy(_machineHash, 0, oid, copyidx, 3); + copyidx += 3; + + Array.Copy(_procId, 2, oid, copyidx, 2); + copyidx += 2; + + var inc = BitConverter.GetBytes(GenerateInc()); + Array.Reverse(inc); + Array.Copy(inc, 1, oid, copyidx, 3); + + return new Oid(oid); + } + + /// + /// Generates the time. + /// + /// + private int GenerateTime() + { + var now = DateTime.UtcNow; + //DateTime nowtime = new DateTime(epoch.Year, epoch.Month, epoch.Day, now.Hour, now.Minute, now.Second, now.Millisecond); + var diff = now - BsonInfo.Epoch; + return Convert.ToInt32(Math.Floor(diff.TotalSeconds)); + } + + /// + /// Generates the inc. + /// + /// + private int GenerateInc() + { + lock(_inclock) + { + return ++_inc; + } + } + + /// + /// Generates the constants. + /// + private void GenerateConstants() + { + _machineHash = GenerateHostHash(); + _procId = BitConverter.GetBytes(GenerateProcId()); + Array.Reverse(_procId); + } + + /// + /// Generates the host hash. 
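[Editor's sketch, not part of this commit] Generate above assembles the twelve ObjectId bytes as 4 bytes of seconds since the BSON epoch, 3 bytes of machine-name hash, 2 bytes of process id, and 3 bytes of an in-process counter. A hypothetical call site inside the driver (OidGenerator is internal):

var generator = new OidGenerator();
var first  = generator.Generate();
var second = generator.Generate();
// Both ids share the machine and process bytes; they differ in the trailing
// counter bytes (and in the leading timestamp bytes if a second has elapsed).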
+ /// + /// + private byte[] GenerateHostHash() + { + var md5 = MD5.Create(); + var host = Dns.GetHostName(); + return md5.ComputeHash(Encoding.Default.GetBytes(host)); + } + + /// + /// Generates the proc id. + /// + /// + private int GenerateProcId() + { + var proc = Process.GetCurrentProcess(); + return proc.Id; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Util/ReflectionExtensions.cs b/source/MongoDB/Util/ReflectionExtensions.cs new file mode 100644 index 00000000..e9af7426 --- /dev/null +++ b/source/MongoDB/Util/ReflectionExtensions.cs @@ -0,0 +1,78 @@ +using System; +using System.Reflection; + +namespace MongoDB.Util +{ + /// + /// + /// + internal static class ReflectionExtensions + { + /// + /// Gets the custom attribute. + /// + /// + /// The member. + /// if set to true [inherit]. + /// + public static T GetCustomAttribute(this MemberInfo member, bool inherit) where T : Attribute + { + var atts = member.GetCustomAttributes(typeof(T), inherit); + if (atts.Length > 0) + return (T)atts[0]; + + return null; + } + + /// + /// Gets the return type of the member. + /// + /// The member. + /// + public static Type GetReturnType(this MemberInfo member) + { + switch (member.MemberType) + { + case MemberTypes.Field: + return ((FieldInfo)member).FieldType; + case MemberTypes.Property: + return ((PropertyInfo)member).PropertyType; + case MemberTypes.Method: + return ((MethodInfo)member).ReturnType; + } + + throw new NotSupportedException("Only fields, properties, and methods are supported."); + } + + /// + /// Determines whether [is open type assignable from] [the specified open type]. + /// + /// Type of the open. + /// Type of the closed. + /// + /// true if [is open type assignable from] [the specified open type]; otherwise, false. + /// + public static bool IsOpenTypeAssignableFrom(this Type openType, Type closedType) + { + if (!openType.IsGenericTypeDefinition) + throw new ArgumentException("Must be an open generic type.", "openType"); + if (!closedType.IsGenericType || closedType.IsGenericTypeDefinition) + return false; + + var openArgs = openType.GetGenericArguments(); + var closedArgs = closedType.GetGenericArguments(); + if (openArgs.Length != closedArgs.Length) + return false; + try + { + var newType = openType.MakeGenericType(closedArgs); + return newType.IsAssignableFrom(closedType); + } + catch + { + //we don't really care here, it just means the answer is false. 
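+ // (e.g. MakeGenericType throws an ArgumentException when the closed type's
+ //  arguments violate a constraint declared on the open type's generic parameters)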
+ return false; + } + } + } +} diff --git a/source/MongoDB/Util/ScopedDictionary.cs b/source/MongoDB/Util/ScopedDictionary.cs new file mode 100644 index 00000000..57c0ea33 --- /dev/null +++ b/source/MongoDB/Util/ScopedDictionary.cs @@ -0,0 +1,45 @@ +using System.Collections.Generic; + +namespace MongoDB.Util +{ + internal class ScopedDictionary + { + private readonly Dictionary _map; + private readonly ScopedDictionary _previous; + + public ScopedDictionary(ScopedDictionary previous) + { + _previous = previous; + _map = new Dictionary(); + } + + public ScopedDictionary(ScopedDictionary previous, IEnumerable> pairs) + : this(previous) + { + foreach(var p in pairs) + _map.Add(p.Key, p.Value); + } + + public void Add(TKey key, TValue value) + { + _map.Add(key, value); + } + + public bool TryGetValue(TKey key, out TValue value) + { + for(var scope = this; scope != null; scope = scope._previous) + if(scope._map.TryGetValue(key, out value)) + return true; + value = default(TValue); + return false; + } + + public bool ContainsKey(TKey key) + { + for(var scope = this; scope != null; scope = scope._previous) + if(scope._map.ContainsKey(key)) + return true; + return false; + } + } +} \ No newline at end of file diff --git a/source/MongoDB/Util/TypeHelper.cs b/source/MongoDB/Util/TypeHelper.cs new file mode 100644 index 00000000..789c0148 --- /dev/null +++ b/source/MongoDB/Util/TypeHelper.cs @@ -0,0 +1,77 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace MongoDB.Util +{ + internal static class TypeHelper + { + private static Type FindIEnumerable(Type seqType) + { + if(seqType == null || seqType == typeof(string)) + return null; + if(seqType.IsArray) + return typeof(IEnumerable<>).MakeGenericType(seqType.GetElementType()); + if(seqType.IsGenericType) + foreach(var arg in seqType.GetGenericArguments()) + { + var ienum = typeof(IEnumerable<>).MakeGenericType(arg); + if(ienum.IsAssignableFrom(seqType)) + return ienum; + } + var ifaces = seqType.GetInterfaces(); + if(ifaces != null && ifaces.Length > 0) + foreach(var ienum in ifaces.Select(iface => FindIEnumerable(iface)) + .Where(ienum => ienum != null)) + return ienum; + if(seqType.BaseType != null && seqType.BaseType != typeof(object)) + return FindIEnumerable(seqType.BaseType); + return null; + } + + internal static Type GetSequenceType(Type elementType) + { + return typeof(IEnumerable<>).MakeGenericType(elementType); + } + + internal static Type GetElementType(Type seqType) + { + var ienum = FindIEnumerable(seqType); + return ienum == null ? seqType : ienum.GetGenericArguments()[0]; + } + + internal static bool IsNullableType(Type type) + { + return type != null && type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>); + } + + internal static bool IsNullAssignable(Type type) + { + return !type.IsValueType || IsNullableType(type); + } + + internal static Type GetNonNullableType(Type type) + { + return IsNullableType(type) ? 
type.GetGenericArguments()[0] : type; + } + + internal static bool IsNativeToMongo(Type type) + { + var typeCode = Type.GetTypeCode(type); + + if (typeCode != TypeCode.Object) + return true; + + if (type == typeof(Guid)) + return true; + + if (type == typeof(Oid)) + return true; + + if (type == typeof(byte[])) + return true; + + return false; + } + } +} \ No newline at end of file diff --git a/MongoDB.Driver.Benchmark/AssemblyInfo.cs b/tools/Benchmark/AssemblyInfo.cs similarity index 96% rename from MongoDB.Driver.Benchmark/AssemblyInfo.cs rename to tools/Benchmark/AssemblyInfo.cs index 14ce3cce..8ec117b2 100644 --- a/MongoDB.Driver.Benchmark/AssemblyInfo.cs +++ b/tools/Benchmark/AssemblyInfo.cs @@ -1,6 +1,5 @@ -using System.Reflection; -using System.Runtime.CompilerServices; - +using System.Reflection; + // Information about this assembly is defined by the following attributes. // Change them to the values specific to your project. diff --git a/tools/Benchmark/Benchmark.csproj b/tools/Benchmark/Benchmark.csproj new file mode 100644 index 00000000..e7917e2e --- /dev/null +++ b/tools/Benchmark/Benchmark.csproj @@ -0,0 +1,84 @@ + + + + Debug + AnyCPU + 9.0.30729 + 2.0 + {5ACD68A0-0F2E-452A-90E3-3D1CB82C055B} + Exe + Benchmark + Benchmark + v3.5 + + + + + 3.5 + publish\ + true + Disk + false + Foreground + 7 + Days + false + false + true + 0 + 1.0.0.%2a + false + false + true + + + true + full + false + bin\Debug + DEBUG + prompt + 4 + AllRules.ruleset + + + none + false + bin\Release + prompt + 4 + AllRules.ruleset + + + + + + + + + + + + {B125BBA6-BFFD-44FA-9254-9B1754CD8AF3} + MongoDB + + + + + False + .NET Framework 3.5 SP1 Client Profile + false + + + False + .NET Framework 3.5 SP1 + true + + + False + Windows Installer 3.1 + true + + + + \ No newline at end of file diff --git a/tools/Benchmark/Main.cs b/tools/Benchmark/Main.cs new file mode 100644 index 00000000..0e027e9c --- /dev/null +++ b/tools/Benchmark/Main.cs @@ -0,0 +1,292 @@ +using System; +using System.IO; +using MongoDB.Bson; + +namespace MongoDB.Driver.Benchmark +{ + /// + /// This is the standard 10gen benchmark program. 
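[Editor's sketch, not part of this commit] For reference, the TypeHelper routines shown earlier resolve element and nullable types roughly as sketched below (hypothetical expectations; TypeHelper is internal to the driver).

TypeHelper.GetElementType(typeof(int[]));         // typeof(int)
TypeHelper.GetElementType(typeof(List<string>));  // typeof(string)
TypeHelper.GetElementType(typeof(string));        // typeof(string) -- strings are not treated as sequences
TypeHelper.GetNonNullableType(typeof(int?));      // typeof(int)
TypeHelper.IsNativeToMongo(typeof(Guid));         // true
TypeHelper.IsNativeToMongo(typeof(Document));     // false (TypeCode.Object and not one of the special cases)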
+ /// + internal class MainClass + { + private static readonly Document large = new Document(); + private static readonly Document medium = new Document(); + private static readonly Document small = new Document(); + + private static int batchSize = 100; + private static int perTrial = 5000; + private static int trials = 1; + + public static void Main(string[] args) + { + SetupDocuments(); + + var m = new Mongo(); + m.Connect(); + var db = m["benchmark"]; + + db.Metadata.DropDatabase(); + Console.WriteLine("Starting Tests"); + + RunEncodeTest("encode (small)", small); + RunEncodeTest("encode (medium)", medium); + RunEncodeTest("encode (large)", large); + + RunDecodeTest("decode (small)", small); + RunDecodeTest("decode (medium)", medium); + RunDecodeTest("decode (large)", large); + + db.Metadata.DropDatabase(); + RunInsertTest("insert (small, no index)", db, "small_none", small, false, false); + RunInsertTest("insert (medium, no index)", db, "medium_none", medium, false, false); + RunInsertTest("insert (large, no index)", db, "large_none", large, false, false); + + RunInsertTest("insert (small, indexed)", db, "small_index", small, true, false); + RunInsertTest("insert (medium, indexed)", db, "medium_index", medium, true, false); + RunInsertTest("insert (large, indexed)", db, "large_index", large, true, false); + + RunInsertTest("batch insert (small, no index)", db, "small_bulk", small, false, true); + RunInsertTest("batch insert (medium, no index)", db, "medium_bulk", medium, false, true); + RunInsertTest("batch insert (large, no index)", db, "large_bulk", large, false, true); + + var fonespec = new Document().Add("x", perTrial/2); + RunFindTest("find_one (small, no index)", db, "small_none", fonespec, false); + RunFindTest("find_one (medium, no index)", db, "medium_none", fonespec, false); + RunFindTest("find_one (large, no index)", db, "large_none", fonespec, false); + + RunFindTest("find_one (small, indexed)", db, "small_index", fonespec, false); + RunFindTest("find_one (medium, indexed)", db, "medium_index", fonespec, false); + RunFindTest("find_one (large, indexed)", db, "large_index", fonespec, false); + + RunFindTest("find (small, no index)", db, "small_none", fonespec, true); + RunFindTest("find (medium, no index)", db, "medium_none", fonespec, true); + RunFindTest("find (large, no index)", db, "large_none", fonespec, true); + + RunFindTest("find (small, indexed)", db, "small_index", fonespec, true); + RunFindTest("find (medium, indexed)", db, "medium_index", fonespec, true); + RunFindTest("find (large, indexed)", db, "large_index", fonespec, true); + + var findRange = new Document().Add("x", new Document().Add("$gt", perTrial/2).Add("$lt", perTrial/2 + batchSize)); + RunFindTest("find range (small, indexed)", db, "small_index", findRange, true); + RunFindTest("find range (medium, indexed)", db, "medium_index", findRange, true); + RunFindTest("find range (large, indexed)", db, "large_index", findRange, true); + + Console.WriteLine("Press any key to continue..."); + Console.ReadKey(); + } + + private static void SetupDocuments() + { + medium.Add("integer", 5); + medium.Add("number", 5.05); + medium.Add("boolean", false); + medium.Add("array", new[] {"test", "benchmark"}); + + large.Add("base_url", "http://www.example.com/test-me"); + large.Add("total_word_count", 6743); + large.Add("access_time", DateTime.UtcNow); + large.Add("meta_tags", + new Document() + .Add("description", "i am a long description string") + .Add("author", "Holly Man") + .Add("dynamically_created_meta_tag", "who 
know\n what")); + large.Add("page_structure", + new Document().Add("counted_tags", 3450) + .Add("no_of_js_attached", 10) + .Add("no_of_images", 6)); + var words = new[] + { + "10gen", "web", "open", "source", "application", "paas", + "platform-as-a-service", "technology", "helps", + "developers", "focus", "building", "mongodb", "mongo" + }; + var harvestedWords = new string[words.Length*20]; + for(var i = 0; i < words.Length*20; i++) + harvestedWords[i] = words[i%words.Length]; + large.Add("harvested_words", harvestedWords); + } + + private static void RunInsertTest(string name, IMongoDatabase db, string col, Document doc, bool index, bool bulk) + { + var lowest = TimeSpan.MaxValue; + for(var i = 0; i < trials; i++) + { + SetupInsert(db, "col", index); + var ret = TimeInsert(db, col, doc, bulk); + if(ret < lowest) + lowest = ret; + } + var opsSec = (int)(perTrial/lowest.TotalSeconds); + Console.Out.WriteLine(String.Format("{0}{1} {2}", name + new string('.', 55 - name.Length), opsSec, lowest)); + } + + private static void SetupInsert(IMongoDatabase db, string col, bool index) + { + try + { + db.Metadata.DropCollection(col); + if(index) + { + var idx = new Document().Add("x", IndexOrder.Ascending); + db[col].Metadata.CreateIndex(idx, false); + } + } + catch(MongoCommandException) + { + //swallow for now. + } + } + + private static TimeSpan TimeInsert(IMongoDatabase db, string col, Document doc, bool bulk) + { + var start = DateTime.Now; + if(bulk) + DoBulkInsert(db, col, doc, batchSize); + else + DoInsert(db, col, doc); + var stop = DateTime.Now; + var t = stop - start; + return t; + } + + private static void DoInsert(IMongoDatabase db, string col, Document doc) + { + for(var i = 0; i < perTrial; i++) + { + var ins = new Document(); + doc.CopyTo(ins); + ins.Add("x", i); + db[col].Insert(ins); + } + } + + private static void DoBulkInsert(IMongoDatabase db, string col, Document doc, int size) + { + for(var i = 0; i < perTrial/size; i++) + { + var docs = new Document[size]; + for(var f = 0; f < docs.Length; f++) + { + var ins = new Document(); + doc.CopyTo(ins); + docs[f] = ins; + } + db[col].Insert(docs); + } + } + + private static void RunEncodeTest(string name, Document doc) + { + var lowest = TimeSpan.MaxValue; + for(var i = 0; i < trials; i++) + { + var ret = TimeEncode(doc); + if(ret < lowest) + lowest = ret; + } + var opsSec = (int)(perTrial/lowest.TotalSeconds); + Console.Out.WriteLine(String.Format("{0}{1} {2}", name + new string('.', 55 - name.Length), opsSec, lowest)); + } + + private static TimeSpan TimeEncode(Document doc) + { + var start = DateTime.Now; + DoEncode(doc); + var stop = DateTime.Now; + var t = stop - start; + return t; + } + + private static void DoEncode(Document doc) + { + var ms = new MemoryStream(); + for(var i = 0; i < perTrial; i++) + { + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + writer.WriteObject(doc); + ms.Seek(0, SeekOrigin.Begin); + } + } + + private static void RunDecodeTest(string name, Document doc) + { + var ms = new MemoryStream(); + var writer = new BsonWriter(ms, new BsonDocumentDescriptor()); + writer.WriteObject(doc); + + var buff = ms.ToArray(); + + var lowest = TimeSpan.MaxValue; + for(var i = 0; i < trials; i++) + { + var ret = TimeDecode(buff); + if(ret < lowest) + lowest = ret; + } + var opsSec = (int)(perTrial/lowest.TotalSeconds); + Console.Out.WriteLine(String.Format("{0}{1} {2}", name + new string('.', 55 - name.Length), opsSec, lowest)); + } + + private static TimeSpan TimeDecode(byte[] doc) + { + var start = 
DateTime.Now; + DoDecode(doc); + var stop = DateTime.Now; + var t = stop - start; + return t; + } + + private static void DoDecode(byte[] buff) + { + var ms = new MemoryStream(buff); + for(var i = 0; i < perTrial; i++) + { + var reader = new BsonReader(ms, new BsonDocumentBuilder()); + reader.Read(); + ms.Seek(0, SeekOrigin.Begin); + } + } + + private static void RunFindTest(string name, IMongoDatabase db, string col, Document spec, bool range) + { + var lowest = TimeSpan.MaxValue; + for(var i = 0; i < trials; i++) + { + var ret = TimeFind(db, col, spec, range); + if(ret < lowest) + lowest = ret; + } + var opsSec = (int)(perTrial/lowest.TotalSeconds); + Console.Out.WriteLine(String.Format("{0}{1} {2}", name + new string('.', 55 - name.Length), opsSec, lowest)); + } + + private static TimeSpan TimeFind(IMongoDatabase db, string col, Document psec, bool range) + { + var start = DateTime.Now; + if(range) + DoFindOne(db, col, psec); + else + DoFind(db, col, psec); + var stop = DateTime.Now; + var t = stop - start; + return t; + } + + private static void DoFindOne(IMongoDatabase db, string col, Document spec) + { + for(var i = 0; i < perTrial; i++) + db[col].FindOne(spec); + } + + private static void DoFind(IMongoDatabase db, string col, Document spec) + { + for(var i = 0; i < perTrial; i++) + { + var cur = db[col].Find(spec); + foreach(var d in cur.Documents) + { + } + } + } + } +} \ No newline at end of file diff --git a/tools/MSBuild.Community.Tasks/ICSharpCode.SharpZipLib.dll b/tools/MSBuild.Community.Tasks/ICSharpCode.SharpZipLib.dll new file mode 100644 index 00000000..77bafe8b Binary files /dev/null and b/tools/MSBuild.Community.Tasks/ICSharpCode.SharpZipLib.dll differ diff --git a/tools/MSBuild.Community.Tasks/MSBuild.Community.Tasks.Targets b/tools/MSBuild.Community.Tasks/MSBuild.Community.Tasks.Targets new file mode 100644 index 00000000..1f01541b --- /dev/null +++ b/tools/MSBuild.Community.Tasks/MSBuild.Community.Tasks.Targets @@ -0,0 +1,136 @@ + + + + + + $(MSBuildExtensionsPath)\MSBuildCommunityTasks + $(MSBuildCommunityTasksPath)\MSBuild.Community.Tasks.dll + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tools/MSBuild.Community.Tasks/MSBuild.Community.Tasks.dll b/tools/MSBuild.Community.Tasks/MSBuild.Community.Tasks.dll new file mode 100644 index 00000000..cf847a55 Binary files /dev/null and b/tools/MSBuild.Community.Tasks/MSBuild.Community.Tasks.dll differ
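[Editor's sketch, not part of this commit] As a closing illustration of the encode/decode paths the benchmark times, a minimal round trip through BsonWriter and BsonReader, mirroring DoEncode and DoDecode above. Type and constructor names are taken from this diff and assumed to be reachable via the MongoDB and MongoDB.Bson namespaces.

using System.IO;
using MongoDB;
using MongoDB.Bson;

internal static class BsonRoundTripSketch
{
    internal static Document Run()
    {
        var doc = new Document().Add("x", 1).Add("name", "benchmark");

        var ms = new MemoryStream();
        var writer = new BsonWriter(ms, new BsonDocumentDescriptor());
        writer.WriteObject(doc);             // encode: Document -> BSON bytes in the stream

        ms.Seek(0, SeekOrigin.Begin);
        var reader = new BsonReader(ms, new BsonDocumentBuilder());
        return (Document)reader.Read();      // decode: expected to yield an equivalent Document
    }
}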