diff --git a/.gitattributes b/.gitattributes
index 79531b9..3f81148 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -76,3 +76,6 @@
# *nix shell scripts always use LF (see .editorconfig)
*.sh eol=lf
+
+# INTERLIS files set to a fixed line ending to get consistent file hashes
+*.ili text eol=lf
diff --git a/src/ClientApp/src/components/Detail.jsx b/src/ClientApp/src/components/Detail.jsx
index 959845f..5cc3324 100644
--- a/src/ClientApp/src/components/Detail.jsx
+++ b/src/ClientApp/src/components/Detail.jsx
@@ -20,7 +20,6 @@ import { getAllModels } from "./Utils";
export function Detail() {
const [model, setModel] = useState();
const [loading, setLoading] = useState();
- const [modelText, setModelText] = useState("");
const { t } = useTranslation("common");
const location = useLocation();
@@ -51,39 +50,16 @@ export function Detail() {
useEffect(() => {
setLoading(true);
- async function getModelPreview(model) {
- // Use try catch block to avoid error when CORS prevents successful fetch.
- try {
- const response = await fetch(model.uri);
- if (response?.ok) {
- setModelText(await response.text());
- setLoading(false);
- } else {
- setModelText(t("no-model-preview"));
- setLoading(false);
- }
- } catch {
- setModelText(t("no-model-preview"));
- setLoading(false);
- }
- }
-
async function getModel(md5, name) {
const response = await fetch("/model/" + md5 + "/" + name);
- if (response.ok) {
- if (response.status === 204 /* No Content */) {
- setModel();
- setLoading(false);
- } else {
- const model = await response.json();
- setModel(model);
- getModelPreview(model);
- }
+ if (response.ok && response.status !== 204 /* No Content */) {
+ const model = await response.json();
+ setModel(model);
} else {
setModel();
- setLoading(false);
}
+ setLoading(false);
}
getModel(md5, name);
}, [md5, name, t]);
@@ -99,7 +75,7 @@ export function Detail() {
{t("to-search")}
)}
- {(!model || !modelText) && loading && (
+ {!model && loading && (
@@ -109,7 +85,7 @@ export function Detail() {
{t("invalid-model-url")}
)}
- {model && modelText && (
+ {model && (
<>
@@ -169,9 +145,7 @@ export function Detail() {
key={m}
label={m}
variant="outlined"
- >
- {m}
-
+ />
))}
)}
@@ -181,12 +155,13 @@ export function Detail() {
{t("catalogue-files")}:{" "}
{model.catalogueFiles &&
model.catalogueFiles
- .sort((a, b) => {
- const result = (a.match(/\//g) || []).length - (b.match(/\//g) || []).length;
- return result === 0 ? a.localeCompare(b, undefined, { sensitivity: "base" }) : result;
- })
+ .sort(
+ (a, b) =>
+ (a.match(/\//g) || []).length - (b.match(/\//g) || []).length ||
+ a.localeCompare(b, undefined, { sensitivity: "base" }),
+ )
.map((f) => (
-
+
{f}
@@ -222,7 +197,7 @@ export function Detail() {
inputProps={{ style: { fontSize: 12, fontFamily: "'Courier New', monospace" } }}
InputLabelProps={{ style: { fontSize: 22 } }}
InputProps={{ readOnly: true, style: { fontSize: 22 } }}
- value={modelText}
+ value={model.fileContent?.content ?? t("no-model-preview")}
focused={false}
/>
diff --git a/src/Controllers/ModelController.cs b/src/Controllers/ModelController.cs
index 241679d..100d2a8 100644
--- a/src/Controllers/ModelController.cs
+++ b/src/Controllers/ModelController.cs
@@ -33,6 +33,7 @@ public ModelController(ILogger<ModelController> logger, RepoBrowserContext conte
var model = context.Models
.Include(m => m.ModelRepository)
+ .Include(m => m.FileContent)
.Where(m => m.MD5 == md5 && m.Name == name)
.AsNoTracking()
.SingleOrDefault();
diff --git a/src/Crawler/IRepositoryCrawler.cs b/src/Crawler/IRepositoryCrawler.cs
index 7ceb1a9..63f4b88 100644
--- a/src/Crawler/IRepositoryCrawler.cs
+++ b/src/Crawler/IRepositoryCrawler.cs
@@ -14,4 +14,12 @@ public interface IRepositoryCrawler
/// <param name="options">The <see cref="RepositoryCrawlerOptions"/> that contain the repository at the root of the model repository tree and other configurations.</param>
/// <returns>Dictionary containing all repositories found in tree. Repository host is used as key. Repositories contain all found information. Root repository contains full tree.</returns>
Task<IDictionary<string, Repository>> CrawlModelRepositories(RepositoryCrawlerOptions options);
+
+ /// <summary>
+ /// Fetches the INTERLIS files from the <paramref name="repositories"/>. Files are identified by their MD5 hash and only downloaded if not already contained in <paramref name="existingFiles"/>.
+ /// If a <see cref="Model"/> is missing the <see cref="Model.MD5"/> property, it is set according to the downloaded file.
+ /// </summary>
+ /// <param name="existingFiles">The <see cref="InterlisFile"/>s previously fetched.</param>
+ /// <param name="repositories">The repositories to fetch the files for.</param>
+ Task FetchInterlisFiles(IEnumerable<InterlisFile> existingFiles, IEnumerable<Repository> repositories);
}
diff --git a/src/Crawler/RepositoryCrawler.cs b/src/Crawler/RepositoryCrawler.cs
index 6c83c1b..9c4a02f 100644
--- a/src/Crawler/RepositoryCrawler.cs
+++ b/src/Crawler/RepositoryCrawler.cs
@@ -18,6 +18,44 @@ public RepositoryCrawler(ILogger<RepositoryCrawler> logger, IHttpClientFactory h
httpClient = httpClientFactory.CreateClient();
}
+ /// <inheritdoc/>
+ public async Task FetchInterlisFiles(IEnumerable<InterlisFile> existingFiles, IEnumerable<Repository> repositories)
+ {
+ var allFiles = existingFiles.ToDictionary(f => f.MD5, StringComparer.OrdinalIgnoreCase);
+ foreach (var repository in repositories)
+ {
+ foreach (var model in repository.Models)
+ {
+ InterlisFile? file;
+ if (!string.IsNullOrEmpty(model.MD5) && allFiles.TryGetValue(model.MD5, out file))
+ {
+ model.FileContent = file;
+ continue;
+ }
+
+ var modelFileUrl = model.ModelRepository.Uri.Append(model.File);
+ file = await FetchInterlisFile(modelFileUrl).ConfigureAwait(false);
+ if (file != null)
+ {
+ if (!allFiles.TryAdd(file.MD5, file))
+ {
+ file = allFiles[file.MD5];
+ }
+
+ model.FileContent = file;
+ if (string.IsNullOrEmpty(model.MD5))
+ {
+ model.MD5 = file.MD5;
+ }
+ else if (!model.MD5.Equals(file.MD5, StringComparison.OrdinalIgnoreCase))
+ {
+ logger.LogWarning("The MD5 Hash of Model <{Model}> ({MD5Model}) does not match that of the file <{URL}> ({MD5File}).", model.Name, model.MD5, modelFileUrl, file.MD5);
+ }
+ }
+ }
+ }
+ }
+
/// <inheritdoc/>
public async Task<IDictionary<string, Repository>> CrawlModelRepositories(RepositoryCrawlerOptions options)
{
@@ -191,26 +229,6 @@ private async Task<HashSet<Model>> CrawlIlimodels(Uri repositoryUri)
})
.ToHashSet();
- foreach (var model in models)
- {
- if (string.IsNullOrEmpty(model.MD5))
- {
- var modelFileUrl = repositoryUri.Append(model.File);
- logger.LogInformation("Calculate missing MD5 for Model <{Model}> in File <{URL}>.", model.Name, modelFileUrl);
-
- try
- {
- var stream = await GetStreamFromUrl(modelFileUrl).ConfigureAwait(false);
- var md5 = await GetMD5FromStream(stream).ConfigureAwait(false);
- model.MD5 = md5;
- }
- catch (Exception ex) when (ex is HttpRequestException || ex is OperationCanceledException)
- {
- logger.LogError(ex, "Failed to calculate missing MD5 for Model <{Model}> in File <{URL}>", model.Name, modelFileUrl);
- }
- }
- }
-
return models;
}
}
@@ -224,6 +242,29 @@ private async Task<HashSet<Model>> CrawlIlimodels(Uri repositoryUri)
}
}
+ private async Task<InterlisFile?> FetchInterlisFile(Uri fileUri)
+ {
+ logger.LogDebug("Download INTERLIS file <{URL}>", fileUri);
+ try
+ {
+ var stream = await GetStreamFromUrl(fileUri).ConfigureAwait(false);
+ var md5 = await GetMD5FromStream(stream).ConfigureAwait(false);
+ stream.Seek(0, SeekOrigin.Begin);
+ using var reader = new StreamReader(stream);
+ var content = reader.ReadToEnd();
+ return new InterlisFile
+ {
+ MD5 = md5,
+ Content = content,
+ };
+ }
+ catch (Exception ex) when (ex is HttpRequestException || ex is OperationCanceledException)
+ {
+ logger.LogError(ex, "Failed to download INTERLIS file <{URL}>", fileUri);
+ return null;
+ }
+ }
+
private async Task<Stream> GetStreamFromUrl(Uri url)
{
var response = await httpClient.GetAsync(url).ConfigureAwait(false);
diff --git a/src/DbUpdateService.cs b/src/DbUpdateService.cs
index a8882df..96f6a55 100644
--- a/src/DbUpdateService.cs
+++ b/src/DbUpdateService.cs
@@ -39,37 +39,38 @@ private async Task UpdateModelRepoDatabase()
try
{
- using (var scope = scopeFactory.CreateScope())
+ using var scope = scopeFactory.CreateScope();
+
+ var crawler = scope.ServiceProvider.GetRequiredService<IRepositoryCrawler>();
+ var repositories = await crawler.CrawlModelRepositories(crawlerOptions).ConfigureAwait(false);
+ using var context = scope.ServiceProvider.GetRequiredService<RepoBrowserContext>();
+
+ var knownParentRepositories = context.Repositories
+ .Where(r => r.SubsidiarySites.Any())
+ .Select(r => r.HostNameId)
+ .ToList();
+
+ var allParentRepositoriesCrawled = knownParentRepositories.All(repositories.ContainsKey);
+
+ if (repositories.Any() && allParentRepositoriesCrawled)
+ {
+ using var transaction = context.Database.BeginTransaction();
+
+ context.Catalogs.ExecuteDelete();
+ context.Models.ExecuteDelete();
+ context.Repositories.ExecuteDelete();
+ context.SaveChanges();
+
+ await crawler.FetchInterlisFiles(context.InterlisFiles, repositories.Values);
+ context.Repositories.AddRange(repositories.Values);
+ context.SaveChanges();
+
+ transaction.Commit();
+ logger.LogInformation("Updating ModelRepoDatabase complete. Inserted {RepositoryCount} repositories.", repositories.Count);
+ }
+ else
{
- var crawler = scope.ServiceProvider.GetRequiredService<IRepositoryCrawler>();
- var repositories = await crawler.CrawlModelRepositories(crawlerOptions).ConfigureAwait(false);
- using var context = scope.ServiceProvider.GetRequiredService<RepoBrowserContext>();
-
- var knownParentRepositories = context.Repositories
- .Where(r => r.SubsidiarySites.Any())
- .Select(r => r.HostNameId)
- .ToList();
-
- var allParentRepositoriesCrawled = knownParentRepositories.All(repositories.ContainsKey);
-
- if (repositories.Any() && allParentRepositoriesCrawled)
- {
- context.Database.BeginTransaction();
- context.Catalogs.RemoveRange(context.Catalogs);
- context.Models.RemoveRange(context.Models);
- context.Repositories.RemoveRange(context.Repositories);
- context.SaveChanges();
-
- context.Repositories.AddRange(repositories.Values);
- context.SaveChanges();
-
- context.Database.CommitTransaction();
- logger.LogInformation("Updating ModelRepoDatabase complete. Inserted {RepositoryCount} repositories.", repositories.Count);
- }
- else
- {
- logger.LogError("Updating ModelRepoDatabase aborted. Crawler could not parse all required repositories.");
- }
+ logger.LogError("Updating ModelRepoDatabase aborted. Crawler could not parse all required repositories.");
}
healthCheck.LastDbUpdateSuccessful = true;
diff --git a/src/Models/Catalog.cs b/src/Models/Catalog.cs
index dc9ba18..7d64e35 100644
--- a/src/Models/Catalog.cs
+++ b/src/Models/Catalog.cs
@@ -19,4 +19,6 @@ public class Catalog
public string? Title { get; set; }
public List<string> ReferencedModels { get; set; }
+
+ public Repository ModelRepository { get; set; }
}
diff --git a/src/Models/InterlisFile.cs b/src/Models/InterlisFile.cs
new file mode 100644
index 0000000..131f769
--- /dev/null
+++ b/src/Models/InterlisFile.cs
@@ -0,0 +1,13 @@
+using System.ComponentModel.DataAnnotations;
+
+namespace ModelRepoBrowser.Models;
+
+public class InterlisFile
+{
+ [Key]
+ public string MD5 { get; set; }
+
+ public string Content { get; set; }
+
+ public ICollection<Model> Models { get; set; }
+}
diff --git a/src/Models/Model.cs b/src/Models/Model.cs
index 3ae9506..31752e1 100644
--- a/src/Models/Model.cs
+++ b/src/Models/Model.cs
@@ -7,6 +7,9 @@ public class Model
{
public int Id { get; set; }
+ /// <summary>
+ /// The MD5 Hash of the INTERLIS file that contains this model.
+ /// </summary>
public string? MD5 { get; set; }
public string Name { get; set; }
@@ -33,6 +36,11 @@ public class Model
public string? FurtherInformation { get; set; }
+ /// <summary>
+ /// The actual content of the INTERLIS file.
+ /// </summary>
+ public InterlisFile FileContent { get; set; }
+
[NotMapped]
public bool? IsDependOnModelResult { get; set; } = false;
diff --git a/src/RepoBrowserContext.cs b/src/RepoBrowserContext.cs
index 8bfebc2..55da48a 100644
--- a/src/RepoBrowserContext.cs
+++ b/src/RepoBrowserContext.cs
@@ -17,4 +17,5 @@ public RepoBrowserContext(DbContextOptions<RepoBrowserContext> options)
public DbSet<Model> Models { get; set; }
public DbSet<Catalog> Catalogs { get; set; }
public DbSet<SearchQuery> SearchQueries { get; set; }
+ public DbSet<InterlisFile> InterlisFiles { get; set; }
}
diff --git a/tests/Initialize.cs b/tests/Initialize.cs
index 8b40215..4accf7f 100644
--- a/tests/Initialize.cs
+++ b/tests/Initialize.cs
@@ -1,9 +1,5 @@
-using Microsoft.VisualStudio.TestTools.UnitTesting;
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace ModelRepoBrowser;
@@ -20,10 +16,11 @@ public static void AssemplyInitialize(TestContext testContext)
context.Database.EnsureCreated();
// Clear database and fill it with test data
- context.SearchQueries.RemoveRange(context.SearchQueries);
- context.Catalogs.RemoveRange(context.Catalogs);
- context.Models.RemoveRange(context.Models);
- context.Repositories.RemoveRange(context.Repositories);
+ context.SearchQueries.ExecuteDelete();
+ context.Catalogs.ExecuteDelete();
+ context.Models.ExecuteDelete();
+ context.Repositories.ExecuteDelete();
+ context.InterlisFiles.ExecuteDelete();
context.SaveChanges();
context.SeedData();
diff --git a/tests/ModelRepoBrowser.Test.csproj b/tests/ModelRepoBrowser.Test.csproj
index 4a13a08..a857a30 100644
--- a/tests/ModelRepoBrowser.Test.csproj
+++ b/tests/ModelRepoBrowser.Test.csproj
@@ -58,6 +58,9 @@
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ <None Update="Testdata\models.multiparent.testdata\TwoModelsInOneFile.ili">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </None>
diff --git a/tests/RepoBrowserContextExtensions.cs b/tests/RepoBrowserContextExtensions.cs
index 11f5db4..df08085 100644
--- a/tests/RepoBrowserContextExtensions.cs
+++ b/tests/RepoBrowserContextExtensions.cs
@@ -1,11 +1,6 @@
using Bogus;
using ModelRepoBrowser.Models;
-using System;
-using System.Collections.Generic;
using System.Globalization;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
namespace ModelRepoBrowser;
@@ -48,6 +43,16 @@ public static void SeedData(this RepoBrowserContext context)
child.ParentSites.Add(repositories[9]);
}
+ var fakeInterlisFile = new Faker<InterlisFile>()
+ .StrictMode(true)
+ .RuleFor(f => f.MD5, f => f.Random.Hash(32))
+ .RuleFor(f => f.Content, f => f.Lorem.Lines(f.Random.Int(10, 50)))
+ .RuleFor(f => f.Models, _ => new List<Model>());
+ InterlisFile SeededInterlisFile(int seed) => fakeInterlisFile.UseSeed(seed).Generate();
+ var interlisFiles = Enumerable.Range(1, 100).Select(SeededInterlisFile).ToList();
+ context.InterlisFiles.AddRange(interlisFiles);
+ context.SaveChanges();
+
var modelIds = 1;
var modelRange = Enumerable.Range(modelIds, 100);
var fakeModels = new Faker<Model>()
@@ -68,7 +73,8 @@ public static void SeedData(this RepoBrowserContext context)
.RuleFor(m => m.ModelRepository, f => f.PickRandom(repositories))
.RuleFor(m => m.IsDependOnModelResult, _ => false)
.RuleFor(m => m.Title, f => f.Random.Words(5))
- .RuleFor(m => m.CatalogueFiles, _ => new List<string>());
+ .RuleFor(m => m.CatalogueFiles, _ => new List<string>())
+ .RuleFor(m => m.FileContent, f => f.PickRandom(interlisFiles));
Model SeededModel(int seed) => fakeModels.UseSeed(seed).Generate();
var models = modelRange.Select(SeededModel);
context.Models.AddRange(models);
@@ -86,7 +92,8 @@ public static void SeedData(this RepoBrowserContext context)
.RuleFor(c => c.Owner, f => f.Name.FullName())
.RuleFor(c => c.File, f => new List<string> { f.Random.AlphaNumeric(28) })
.RuleFor(c => c.Title, f => f.Random.Word())
- .RuleFor(c => c.ReferencedModels, f => f.PickRandom(models, 2).Select(m => m.Name).ToList().OrDefault(f, defaultValue: new List<string>()));
+ .RuleFor(c => c.ReferencedModels, f => f.PickRandom(models, 2).Select(m => m.Name).ToList().OrDefault(f, defaultValue: new List<string>()))
+ .RuleFor(c => c.ModelRepository, f => f.PickRandom(repositories));
Catalog SeededCatalog(int seed) => fakeCatalogs.UseSeed(seed).Generate();
var catalogs = catalogRange.Select(SeededCatalog);
context.Catalogs.AddRange(catalogs);
diff --git a/tests/RepositoryCrawlerTest.cs b/tests/RepositoryCrawlerTest.cs
index c4faffc..0ed00e8 100644
--- a/tests/RepositoryCrawlerTest.cs
+++ b/tests/RepositoryCrawlerTest.cs
@@ -17,33 +17,37 @@ public class RepositoryCrawlerTest
private Mock<IHttpClientFactory> httpClientFactory;
private RepositoryCrawler repositoryCrawler;
private MockHttpMessageHandler mockHttp;
+ private Dictionary<string, MockedRequest> mockRequests;
[TestInitialize]
public void Initialize()
{
mockHttp = new MockHttpMessageHandler();
- SetupHttpMockFiles();
+ mockRequests = SetupHttpMockFiles();
SetupRepositoryCrawlerInstance(mockHttp.ToHttpClient());
}
- private void SetupHttpMockFiles()
+ private Dictionary<string, MockedRequest> SetupHttpMockFiles()
{
+ var mockRequests = new Dictionary<string, MockedRequest>();
foreach (var dir in Directory.GetDirectories("./Testdata"))
{
foreach (var file in Directory.GetFiles(dir))
{
- mockHttp
- .When($"https://{Path.GetFileName(dir)}/{Path.GetFileName(file)}")
- .Respond("application/xml", new FileStream(file, FileMode.Open, FileAccess.Read));
-
- mockHttp
- .When(HttpMethod.Head, $"https://{Path.GetFileName(dir)}/")
- .Respond(HttpStatusCode.OK);
+ var url = $"https://{Path.GetFileName(dir)}/{Path.GetFileName(file)}";
+ mockRequests.Add(url, mockHttp
+ .When(url)
+ .Respond("application/xml", new FileStream(file, FileMode.Open, FileAccess.Read)));
}
+
+ mockHttp
+ .When(HttpMethod.Head, $"https://{Path.GetFileName(dir)}/")
+ .Respond(HttpStatusCode.OK);
}
mockHttp.Fallback.Respond(HttpStatusCode.NotFound);
+ return mockRequests;
}
private void SetupRepositoryCrawlerInstance(HttpClient httpClient)
@@ -94,7 +98,7 @@ public async Task CrawlerProducesWarningLogsIfIlidataXmlIsNotFound()
.AssertContains("https://models.multiparent.testdata/")
.AssertContains("https://models.geo.admin.testdata/")
.AssertCount(3);
- loggerMock.Verify(LogLevel.Warning, "Could not analyse https://models.interlis.testdata/ilidata.xml.", Times.Once());
+ loggerMock.Verify(LogLevel.Warning, "Could not analyse https://models.interlis.testdata/ilidata.xml.", Times.Once());
}
private void AssertModelsInterlisCh(Repository repository)
@@ -234,7 +238,7 @@ private void AssertModelsMultiparentCh(Repository repository)
.AssertSingleItem(ps => "https://models.geo.admin.testdata/".Equals(ps.HostNameId, StringComparison.OrdinalIgnoreCase), AssertModelsGeoAdminCh)
.AssertCount(2);
repository.Models
- .AssertCount(3);
+ .AssertCount(7);
repository.Catalogs
.AssertCount(0);
}
@@ -267,18 +271,59 @@ public async Task CrawlerCompletesMissingMD5()
{
var result = await repositoryCrawler.CrawlModelRepositories(new RepositoryCrawlerOptions { RootRepositoryUri = "https://models.multiparent.testdata" });
Assert.IsNotNull(result);
+ result.AssertCount(1).Single().Value.Models.AssertCount(7);
+
+ await repositoryCrawler.FetchInterlisFiles(Enumerable.Empty<InterlisFile>(), result.Values);
+ Assert.IsNotNull(result);
result.AssertCount(1);
result.AssertSingleItem("https://models.multiparent.testdata/", repository =>
{
repository.Models
- .AssertCount(3)
+ .AssertCount(7)
.AssertSingleItem(m => m.Name == "Test_Model_Without_MD5", m => Assert.AreEqual("EB137F3B28D3D06C41F20237886A8B41", m.MD5))
.AssertSingleItem(m => m.Name == "Test_Model_With_Empty_MD5", m => Assert.AreEqual("EB137F3B28D3D06C41F20237886A8B41", m.MD5))
- .AssertSingleItem(m => m.Name == "Test_Model_Without_MD5_And_Invalid_File", m => Assert.AreEqual(null, m.MD5));
+ .AssertSingleItem(m => m.Name == "Test_Model_Without_MD5_And_Invalid_File", m => Assert.AreEqual(null, m.MD5))
+ .AssertSingleItem(m => m.Name == "Test_Model_With_Correct_MD5", m => Assert.AreEqual("eb137f3b28d3d06c41f20237886a8b41", m.MD5))
+ .AssertSingleItem(m => m.Name == "Test_Model_With_Wrong_MD5", m => Assert.AreEqual("85d9577a5d8d9115484cdf2c0917c802", m.MD5))
+ .AssertSingleItem(m => m.Name == "TwoModelsInOneFile_Model1", m => Assert.AreEqual("17dd3681a880848baef146904991c36b", m.MD5))
+ .AssertSingleItem(m => m.Name == "TwoModelsInOneFile_Model2", m => Assert.AreEqual("17dd3681a880848baef146904991c36b", m.MD5));
});
}
+ [TestMethod]
+ public async Task CrawlerFetchesFileOnce()
+ {
+ var result = await repositoryCrawler.CrawlModelRepositories(new RepositoryCrawlerOptions { RootRepositoryUri = "https://models.multiparent.testdata" });
+ Assert.IsNotNull(result);
+ result.AssertCount(1).Single().Value.Models.AssertCount(7);
+
+ await repositoryCrawler.FetchInterlisFiles(Enumerable.Empty<InterlisFile>(), result.Values);
+ Assert.IsNotNull(result);
+ result.AssertCount(1);
+
+ Assert.AreEqual(1, mockHttp.GetMatchCount(mockRequests["https://models.multiparent.testdata/TwoModelsInOneFile.ili"]));
+ Assert.AreEqual(3, mockHttp.GetMatchCount(mockRequests["https://models.multiparent.testdata/TestModel.ili"]), "Missing or wrong MD5 hashes in ilimodels.xml lead to refetches.");
+ }
+
+ [TestMethod]
+ public async Task CrawlerUsesExistingFile()
+ {
+ var result = await repositoryCrawler.CrawlModelRepositories(new RepositoryCrawlerOptions { RootRepositoryUri = "https://models.multiparent.testdata" });
+ Assert.IsNotNull(result);
+ result.AssertCount(1).Single().Value.Models.AssertCount(7);
+
+ var expectedContent = "Expected Content NISECTIOUSIS";
+ await repositoryCrawler.FetchInterlisFiles([new InterlisFile { MD5 = "17DD3681A880848BAEF146904991C36B", Content = expectedContent }], result.Values);
+ Assert.IsNotNull(result);
+ result.AssertCount(1);
+
+ Assert.AreEqual(0, mockHttp.GetMatchCount(mockRequests["https://models.multiparent.testdata/TwoModelsInOneFile.ili"]));
+ result.Single().Value.Models
+ .AssertSingleItem(m => m.Name == "TwoModelsInOneFile_Model1", m => Assert.AreEqual(expectedContent, m.FileContent.Content))
+ .AssertSingleItem(m => m.Name == "TwoModelsInOneFile_Model2", m => Assert.AreEqual(expectedContent, m.FileContent.Content));
+ }
+
[TestMethod]
public async Task ReplacedCatalogsReferencedByPrecurserVersionAreDiscarded()
{
diff --git a/tests/Testdata/models.multiparent.testdata/TwoModelsInOneFile.ili b/tests/Testdata/models.multiparent.testdata/TwoModelsInOneFile.ili
new file mode 100644
index 0000000..edcf039
--- /dev/null
+++ b/tests/Testdata/models.multiparent.testdata/TwoModelsInOneFile.ili
@@ -0,0 +1,7 @@
+INTERLIS 2.4;
+
+MODEL TwoModelsInOneFile_Model1 (en) AT "mailto:noreply@localhost" VERSION "2024_07_18" =
+END TwoModelsInOneFile_Model1.
+
+MODEL TwoModelsInOneFile_Model2 (en) AT "mailto:noreply@localhost" VERSION "2024_07_18" =
+END TwoModelsInOneFile_Model2.
diff --git a/tests/Testdata/models.multiparent.testdata/ilimodels.xml b/tests/Testdata/models.multiparent.testdata/ilimodels.xml
index 36c1d2d..fbe1143 100644
--- a/tests/Testdata/models.multiparent.testdata/ilimodels.xml
+++ b/tests/Testdata/models.multiparent.testdata/ilimodels.xml
@@ -33,6 +33,42 @@
en
2022-08-17
+
+ Test_Model_With_Correct_MD5
+ ili2_3
+ TestModel.ili
+ 2022-08-17
+ en
+ 2022-08-17
+ eb137f3b28d3d06c41f20237886a8b41
+
+
+ Test_Model_With_Wrong_MD5
+ ili2_3
+ TestModel.ili
+ 2022-08-17
+ en
+ 2022-08-17
+ 85d9577a5d8d9115484cdf2c0917c802
+
+
+ TwoModelsInOneFile_Model1
+ ili2_4
+ TwoModelsInOneFile.ili
+ 2024_07_18
+ en
+ 2024-07-18
+ 17dd3681a880848baef146904991c36b
+
+
+ TwoModelsInOneFile_Model2
+ ili2_4
+ TwoModelsInOneFile.ili
+ 2024_07_18
+ en
+ 2024-07-18
+ 17dd3681a880848baef146904991c36b
+