Skip to content

Commit

Permalink
Merge back.
Browse files Browse the repository at this point in the history
  • Loading branch information
Ghislain Fourny committed Feb 28, 2024
2 parents 06c7519 + bd5a9ff commit b1e427e
Show file tree
Hide file tree
Showing 208 changed files with 7,508 additions and 1,845 deletions.
2 changes: 1 addition & 1 deletion docs/HTTPServer.md
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ With the HTTP server running, if you have installed Python and Jupyter notebooks

!pip install rumbledb
%load_ext rumbledb
%env RUMBLEDB_SERVER=http://locahost:8001/jsoniq
%env RUMBLEDB_SERVER=http://localhost:8001/jsoniq

Where, of course, you need to adapt the port (8001) to the one you picked previously.

Expand Down
2 changes: 1 addition & 1 deletion docs/install.md
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ You can try a few more queries.
>>>
( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10)

This is it. RumbleDB is step and ready to go locally. You can now move on to a JSONiq tutorial. A RumbleDB tutorial will also follow soon.
This is it. RumbleDB is set up and ready to go locally. You can now move on to a JSONiq tutorial. A RumbleDB tutorial will also follow soon.

## Running on a cluster

Expand Down
14 changes: 7 additions & 7 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -200,31 +200,31 @@
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.12</artifactId>
<version>3.2.4</version>
<version>3.4.2</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.12</artifactId>
<version>3.2.4</version>
<version>3.4.2</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-mllib_2.12</artifactId>
<version>3.2.4</version>
<version>3.4.2</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>3.3.1</version>
<version>3.3.2</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-avro_2.12</artifactId>
<version>3.2.4</version>
<version>3.4.2</version>
</dependency>
<dependency>
<groupId>org.antlr</groupId>
Expand All @@ -250,12 +250,12 @@
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.6</version>
<version>1.10.0</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.9</version>
<version>3.12.0</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
Expand Down
55 changes: 55 additions & 0 deletions src/main/java/org/rumbledb/api/Item.java
Original file line number Diff line number Diff line change
Expand Up @@ -558,6 +558,35 @@ default void putItem(Item item) {
throw new UnsupportedOperationException("Operation not defined for type " + this.getDynamicType());
}

/**
 * Inserts an item at position i, if this item is an array.
 * Only array items override this; the default implementation rejects the call.
 *
 * @param item the item to insert.
 * @param i the zero-based position at which to insert.
 * @throws UnsupportedOperationException if this item is not an array.
 */
default void putItemAt(Item item, int i) {
throw new UnsupportedOperationException("Operation not defined for type " + this.getDynamicType());
}

/**
 * Inserts all items of the given list at position i, if this item is an array.
 * Only array items override this; the default implementation rejects the call.
 *
 * @param items the list of items to insert.
 * @param i the zero-based position at which to insert.
 * @throws UnsupportedOperationException if this item is not an array.
 */
default void putItemsAt(List<Item> items, int i) {
throw new UnsupportedOperationException("Operation not defined for type " + this.getDynamicType());
}

/**
 * Removes the item at position i, if this item is an array.
 * Only array items override this; the default implementation rejects the call.
 *
 * @param i the zero-based position of the item to remove.
 * @throws UnsupportedOperationException if this item is not an array.
 */
default void removeItemAt(int i) {
throw new UnsupportedOperationException("Operation not defined for type " + this.getDynamicType());
}

/**
* Adds a value pair, if it is an array item.
*
Expand All @@ -577,6 +606,15 @@ default void putItemByKey(String key, Item value) {
throw new UnsupportedOperationException("Operation not defined for type " + this.getDynamicType());
}

/**
 * Removes the key-value pair associated with the given key, if this item is an object.
 * Only object items override this; the default implementation rejects the call.
 *
 * @param key the key of the pair to remove.
 * @throws UnsupportedOperationException if this item is not an object.
 */
default void removeItemByKey(String key) {
throw new UnsupportedOperationException("Operation not defined for type " + this.getDynamicType());
}

/**
* Adds a key-value pair, if it is an object item. The value is lazily computed.
*
Expand Down Expand Up @@ -648,6 +686,23 @@ default boolean isNaN() {
throw new UnsupportedOperationException("Operation not defined for type " + this.getDynamicType());
}

/**
 * Returns the mutability level of the item, i.e. how deeply the item is nested
 * inside transform expressions. The default implementation returns 0
 * (not inside any transform expression); mutable items override this.
 *
 * @return an int representing nestedness of the item inside transform expressions.
 */
default int getMutabilityLevel() {
return 0;
}

/**
 * Sets the mutability level of the item (its nestedness inside transform
 * expressions). The default implementation is a deliberate no-op: immutable
 * item kinds have no level to record. Mutable items override this.
 *
 * @param mutabilityLevel the new mutability level.
 */
default void setMutabilityLevel(int mutabilityLevel) {
}

/**
* Tests for logical equality. The semantics are that of the eq operator.
*
Expand Down
17 changes: 17 additions & 0 deletions src/main/java/org/rumbledb/api/Rumble.java
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import org.rumbledb.context.DynamicContext;
import org.rumbledb.expressions.module.MainModule;
import org.rumbledb.runtime.RuntimeIterator;
import org.rumbledb.runtime.update.PendingUpdateList;
import sparksoniq.spark.SparkSessionManager;

/**
Expand Down Expand Up @@ -50,6 +51,14 @@ public SequenceOfItems runQuery(String query) {
mainModule,
this.configuration
);

if (iterator.isUpdating()) {
PendingUpdateList pul = iterator.getPendingUpdateList(dynamicContext);
pul.applyUpdates(iterator.getMetadata());
}

System.err.println("final iterator is: " + iterator.isUpdating());

return new SequenceOfItems(iterator, dynamicContext, this.configuration);
}

Expand All @@ -70,6 +79,14 @@ public SequenceOfItems runQuery(URI location) throws IOException {
mainModule,
this.configuration
);

if (iterator.isUpdating()) {
PendingUpdateList pul = iterator.getPendingUpdateList(dynamicContext);
pul.applyUpdates(iterator.getMetadata());
}

System.err.println("final iterator is: " + iterator.isUpdating());

return new SequenceOfItems(iterator, dynamicContext, this.configuration);
}

Expand Down
17 changes: 17 additions & 0 deletions src/main/java/org/rumbledb/compiler/DynamicContextVisitor.java
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,23 @@ public DynamicContext visitFunctionDeclaration(FunctionDeclaration declaration,
return defaultAction(expression, argument);
}

// @Override
// public DynamicContext visitTransformExpression(TransformExpression expression, DynamicContext argument) {
//
// for (CopyDeclaration copyDecl : expression.getCopyDeclarations()) {
// Expression child = copyDecl.getSourceExpression();
// this.visit(child, argument);
// RuntimeIterator iterator = VisitorHelpers.generateRuntimeIterator(child, this.configuration);
// iterator.bindToVariableInDynamicContext(argument, copyDecl.getVariableName(), argument);
// }
//
// this.visit(expression.getModifyExpression(), argument);
//
// this.visit(expression.getReturnExpression(), argument);
//
// return argument;
// }

@Override
public DynamicContext visitVariableDeclaration(VariableDeclaration variableDeclaration, DynamicContext argument) {
Name name = variableDeclaration.getVariableName();
Expand Down
21 changes: 21 additions & 0 deletions src/main/java/org/rumbledb/compiler/ExecutionModeVisitor.java
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,8 @@
import org.rumbledb.expressions.primary.VariableReferenceExpression;
import org.rumbledb.expressions.typing.TreatExpression;
import org.rumbledb.expressions.typing.ValidateTypeExpression;
import org.rumbledb.expressions.update.CopyDeclaration;
import org.rumbledb.expressions.update.TransformExpression;
import org.rumbledb.items.ItemFactory;
import org.rumbledb.runtime.misc.RangeOperationIterator;
import org.rumbledb.types.BuiltinTypesCatalogue;
Expand Down Expand Up @@ -611,6 +613,25 @@ public StaticContext visitVariableDeclaration(VariableDeclaration variableDeclar
return argument;
}

/**
 * Resolves execution modes for a transform (copy-modify-return) expression.
 * Copied variables are bound locally, and the whole expression is therefore
 * evaluated locally as well.
 */
@Override
public StaticContext visitTransformExpression(TransformExpression expression, StaticContext argument) {
    StaticContext transformContext = expression.getStaticContext();
    // First pass: visit each copy source in its own context, then record the
    // copied variable as locally stored in the transform's context.
    for (CopyDeclaration declaration : expression.getCopyDeclarations()) {
        this.visit(declaration.getSourceExpression(), declaration.getSourceExpression().getStaticContext());
        transformContext.setVariableStorageMode(declaration.getVariableName(), ExecutionMode.LOCAL);
    }
    expression.setHighestExecutionMode(ExecutionMode.LOCAL);
    // The modify and return clauses see the copied variables, so they are
    // visited with the transform's inner context.
    this.visit(expression.getModifyExpression(), transformContext);
    this.visit(expression.getReturnExpression(), transformContext);
    return argument;
}

@Override
public StaticContext visitProlog(Prolog prolog, StaticContext argument) {
visitDescendants(prolog, argument);
Expand Down
Loading

0 comments on commit b1e427e

Please sign in to comment.