Function: Multi Collection Eventing

    Goal: Show how to access the Data Service when Eventing is listening to multiple collections.

    • This function multiCollectionEventing demonstrates how to access the Data Service when using wildcard bindings.

    • Requires four (4) keyspaces in two buckets "rr100" and "source"

      • rr100.eventing.metadata

      • source._default._default

      • source.myscope.mycol1

      • source.myscope.mycol2

    • Needs two Bindings of type "bucket alias" (as documented in the Scriptlet).

    • Will operate on three test documents. Add them one at a time after the function is deployed.

    • Highlights the use of meta.keyspace (see the minimal sketch after this list)

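    Before the full scriptlet, here is a minimal sketch (illustrative only, reusing the alias_ro binding documented in the scriptlet below) of what a handler listening to source.*.* receives in meta.keyspace and how it can branch per collection:

    function OnUpdate(doc, meta) {
        // meta.keyspace identifies the mutated collection, for example:
        // {"bucket_name":"source","scope_name":"myscope","collection_name":"mycol1"}
        if (meta.keyspace.scope_name !== "myscope") return; // ignore _default._default

        // With a wildcard binding such as source.myscope.*, the advanced accessor
        // needs an explicit keyspace; passing the mutation's own meta reads the
        // same document back from whichever collection fired the mutation.
        var res = couchbase.get(alias_ro, meta);
        if (res.success) {
            log('read from', meta.keyspace.collection_name, res.doc);
        } else {
            log('read failed', res.error);
        }
    }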

    // To run, configure the settings for this Function, multiCollectionEventing, as follows:
    //
    // Set up the four (4) required keyspaces in two buckets "rr100" and "source"
    //   rr100.eventing.metadata
    //   source._default._default
    //   source.myscope.mycol1
    //   source.myscope.mycol2
    //
    // Version 7.1.1+
    //   "Function Scope"
    //     *.* (or try source.* if non-privileged)
    //   "Listen to Location"
    //     source.*.*
    //   "Eventing Storage"
    //     rr100.eventing.metadata
    //   Create two (2) Bindings
    //       "binding type", "alias name...", "bucket.scope.collection",     "Access"
    //       ---------------------------------------------------------------------------
    //       "bucket alias", "alias_ro",      "source.myscope.*",            "read only"
    //       "bucket alias", "alias_rw",      "source.myscope.*",            "read and write"
    //   Deploy the Function
    //   Create the following three (3) documents one at a time and inspect the Application log each time
    //       "bucket.scope.collection"   KEY     DATA
    //       ---------------------------------------------------------------------------
    //       source._default._default    doc0    {"data": "doc0"}
    //       source.myscope.mycol1       doc1    {"data": "doc1"}
    //       source.myscope.mycol2       doc2    {"data": "doc2"}
    
    function OnUpdate(doc, meta) {
        log('>>>A IN',doc, meta);
    
        // TEST GET with a hardcoded keyspace
        var res1 = couchbase.get(alias_ro,{"id": "doc2", "keyspace":
            {"bucket_name": "source","scope_name": "myscope","collection_name": "mycol2"}});
        log('>>>B fixed read',"res1", res1);
    
        // protect against reading from something outside the alias
        if (meta.keyspace.scope_name == "myscope") {
            // TEST GET with the keyspace from meta
            var res2 = couchbase.get(alias_ro,meta);
            log('>>>C read using passed meta (must be myscope)',"res2", res2);
    
            if (meta.keyspace.collection_name == "mycol2") {
                // TEST UPSERT with a hardcoded keyspace
                // add a field to the doc we read in res1
                res1.doc.random1 = Math.random();
                var res3 = couchbase.upsert(alias_rw,{"id": "doc2", "keyspace":
                    {"bucket_name": "source","scope_name": "myscope","collection_name": "mycol2"}}, res1.doc);
                log('>>>D upsert',"res3", res3);

                // TEST REPLACE with a hardcoded keyspace
                res1.doc.random2 = Math.random();
                var res4 = couchbase.replace(alias_rw,{"id": "doc2", "keyspace":
                    {"bucket_name": "source","scope_name": "myscope","collection_name": "mycol2"}}, res1.doc);
                log('>>>E replace',"res4", res4);

                // TEST GET using the passed meta (shows the fields added above)
                var res5 = couchbase.get(alias_rw,meta);
                log('>>>F get (show added fields)',"res5", res5);

                // TEST DELETE with a hardcoded keyspace (so we can TEST the insert)
                var res6 = couchbase.delete(alias_rw,{"id": "doc2", "keyspace":
                    {"bucket_name": "source","scope_name": "myscope","collection_name": "mycol2"}});
                log('>>>G delete',"res6", res6);

                // TEST INSERT with a hardcoded keyspace
                // now remove the added fields to put things back
                delete res1.doc.random1;
                delete res1.doc.random2;
                var res7 = couchbase.insert(alias_rw,{"id": "doc2", "keyspace":
                    {"bucket_name": "source","scope_name": "myscope","collection_name": "mycol2"}}, res1.doc);
                log('>>>H insert',"res7", res7);
            }
        }
    }
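
    The scriptlet dereferences res1.doc only on the mycol2 path, where the preceding read is known to have succeeded. When adapting this pattern to other collections, a small helper along these lines (the name safeGet is illustrative, not part of the scriptlet) keeps the success check in one place before any result is used:

    function safeGet(binding, spec) {
        // Wraps the advanced GET accessor and returns the document body,
        // or null when the key is missing or the read otherwise fails.
        var res = couchbase.get(binding, spec);
        if (!res.success) {
            log('get failed for', spec.id, res.error);
            return null;
        }
        return res.doc;
    }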

    We want to create a small test set of three documents.

    Use the Query Editor to insert the following data items (you do not need an index). Add them one at a time and check the Eventing Application log each time.

      UPSERT INTO source._default._default (KEY,VALUE) VALUES ( "doc0",  {"data": "doc0"} );
    
      UPSERT INTO source.myscope.mycol1 (KEY,VALUE) VALUES ( "doc1",  {"data": "doc1"} );
    
      UPSERT INTO source.myscope.mycol2 (KEY,VALUE) VALUES ( "doc2",  {"data": "doc2"} );
    LOG/OUTPUT: KEY doc0 (the Application log shows the newest entries first)

    The fixed read of "doc2" (>>>B) fails with LCB_KEY_ENOENT because doc2 has not been created yet, and since doc0 arrives from the _default scope the myscope guard skips the remaining steps.
    
    2022-07-28T17:02:04.599-07:00 [INFO] ">>>B fixed read" "res1" {"error":{"code":1,"name":"LCB_KEY_ENOENT","desc":"The document key does not exist on the server","key_not_found":true},"success":false}
    
    2022-07-28T17:02:04.597-07:00 [INFO] ">>>A IN" {"data":"doc0"} {"cas":"1659052924529868800","id":"doc0","expiration":0,"flags":0,"vb":642,"seq":6,"datatype":"json","keyspace":{"bucket_name":"source","scope_name":"_default","collection_name":"_default"},"cid":0}
    
    LOG/OUTPUT: KEY doc1

    The fixed read of "doc2" (>>>B) still fails because doc2 does not yet exist; the read using the passed meta (>>>C) succeeds since doc1 is in myscope, but the mycol2-only steps (>>>D through >>>H) are skipped because doc1 is in mycol1.
    
    2022-07-28T17:03:52.902-07:00 [INFO] ">>>C read using passed meta (must be myscope)" "res2" {"doc":{"data":"doc1"},"meta":{"id":"doc1","cas":"1659053032885387264","datatype":"json"},"success":true}
    
    2022-07-28T17:03:52.901-07:00 [INFO] ">>>B fixed read" "res1" {"error":{"code":1,"name":"LCB_KEY_ENOENT","desc":"The document key does not exist on the server","key_not_found":true},"success":false}
    
    2022-07-28T17:03:52.898-07:00 [INFO] ">>>A IN" {"data":"doc1"} {"cas":"1659053032885387264","id":"doc1","expiration":0,"flags":0,"vb":389,"seq":9,"datatype":"json","keyspace":{"bucket_name":"source","scope_name":"myscope","collection_name":"mycol1"},"cid":8}
    
    LOG/OUTPUT: KEY doc2

    Because doc2 is in source.myscope.mycol2, every step runs: both reads (>>>B and >>>C) succeed, and the upsert, replace, get, delete, and insert (>>>D through >>>H) all complete against doc2.
    
    2022-07-28T17:04:37.807-07:00 [INFO] ">>>H insert" "res7" {"meta":{"id":"doc2","cas":"1659053077807104000"},"success":true}
    
    2022-07-28T17:04:37.806-07:00 [INFO] ">>>G delete" "res6" {"meta":{"id":"doc2","cas":"1659053077806055424"},"success":true}
    
    2022-07-28T17:04:37.805-07:00 [INFO] ">>>F get (show added fields)" "res5" {"doc":{"data":"doc2","random1":0.7875783578859457,"random2":0.47914947531399843},"meta":{"id":"doc2","cas":"1659053077803827200","datatype":"json"},"success":true}
    
    2022-07-28T17:04:37.804-07:00 [INFO] ">>>E replace" "res4" {"meta":{"id":"doc2","cas":"1659053077803827200"},"success":true}
    
    2022-07-28T17:04:37.803-07:00 [INFO] ">>>D upsert" "res3" {"meta":{"id":"doc2","cas":"1659053077802516480"},"success":true}
    
    2022-07-28T17:04:37.800-07:00 [INFO] ">>>C read using passed meta (must be myscope)" "res2" {"doc":{"data":"doc2"},"meta":{"id":"doc2","cas":"1659053077704474624","datatype":"json"},"success":true}
    
    2022-07-28T17:04:37.799-07:00 [INFO] ">>>B fixed read" "res1" {"doc":{"data":"doc2"},"meta":{"id":"doc2","cas":"1659053077704474624","datatype":"json"},"success":true}
    
    2022-07-28T17:04:37.797-07:00 [INFO] ">>>A IN" {"data":"doc2"} {"cas":"1659053077704474624","id":"doc2","expiration":0,"flags":0,"vb":140,"seq":38,"datatype":"json","keyspace":{"bucket_name":"source","scope_name":"myscope","collection_name":"mycol2"},"cid":9}