docker logs sam-prometheus-1

level=info ts=2022-02-28T16:14:51.209Z caller=main.go:333 msg="Starting Prometheus" version="(version=2.17.1, branch=HEAD, revision=ae041f97cfc6f43494bed65ec4ea4e3a0cf2ac69)"

level=info ts=2022-02-28T16:14:51.210Z caller=main.go:334 build_context="(go=go1.13.9, user=root@806b02dfe114, date=20200326-16:18:19)"

level=info ts=2022-02-28T16:14:51.210Z caller=main.go:335 host_details="(Linux 5.10.76-linuxkit #1 SMP Mon Nov 8 10:21:19 UTC 2021 x86_64 aa300025820c (none))"

level=info ts=2022-02-28T16:14:51.210Z caller=main.go:336 fd_limits="(soft=1048576, hard=1048576)"

level=info ts=2022-02-28T16:14:51.210Z caller=main.go:337 vm_limits="(soft=unlimited, hard=unlimited)"

level=info ts=2022-02-28T16:14:51.215Z caller=web.go:514 component=web msg="Start listening for connections" address=0.0.0.0:9090

level=info ts=2022-02-28T16:14:51.214Z caller=main.go:667 msg="Starting TSDB ..."

level=info ts=2022-02-28T16:14:51.225Z caller=head.go:575 component=tsdb msg="replaying WAL, this may take awhile"

level=info ts=2022-02-28T16:14:51.227Z caller=head.go:624 component=tsdb msg="WAL segment loaded" segment=0 maxSegment=0

level=info ts=2022-02-28T16:14:51.227Z caller=head.go:627 component=tsdb msg="WAL replay completed" duration=1.4381ms

level=info ts=2022-02-28T16:14:51.228Z caller=main.go:683 fs_type=EXT4_SUPER_MAGIC

level=info ts=2022-02-28T16:14:51.228Z caller=main.go:684 msg="TSDB started"

level=info ts=2022-02-28T16:14:51.228Z caller=main.go:788 msg="Loading configuration file" filename=/config/isc_prometheus.yml

ts=2022-02-28T16:14:51.234Z caller=dedupe.go:112 component=remote level=info remote_name=43bf89 url=http://iris:52773/api/sam/private/db/write msg="starting WAL watcher" queue=43bf89

ts=2022-02-28T16:14:51.234Z caller=dedupe.go:112 component=remote level=info remote_name=43bf89 url=http://iris:52773/api/sam/private/db/write msg="replaying WAL" queue=43bf89

level=info ts=2022-02-28T16:14:51.239Z caller=main.go:816 msg="Completed loading of configuration file" filename=/config/isc_prometheus.yml

level=info ts=2022-02-28T16:14:51.239Z caller=main.go:635 msg="Server is ready to receive web requests."

ts=2022-02-28T16:14:59.846Z caller=dedupe.go:112 component=remote level=info remote_name=43bf89 url=http://iris:52773/api/sam/private/db/write msg="done replaying WAL" duration=8.6116179s

level=info ts=2022-02-28T16:36:46.583Z caller=main.go:788 msg="Loading configuration file" filename=/config/isc_prometheus.yml

level=info ts=2022-02-28T16:36:46.592Z caller=main.go:816 msg="Completed loading of configuration file" filename=/config/isc_prometheus.yml

level=info ts=2022-02-28T16:37:14.966Z caller=main.go:788 msg="Loading configuration file" filename=/config/isc_prometheus.yml

level=info ts=2022-02-28T16:37:14.974Z caller=main.go:816 msg="Completed loading of configuration file" filename=/config/isc_prometheus.yml

level=info ts=2022-02-28T16:37:37.074Z caller=main.go:788 msg="Loading configuration file" filename=/config/isc_prometheus.yml

level=info ts=2022-02-28T16:37:37.081Z caller=main.go:816 msg="Completed loading of configuration file" filename=/config/isc_prometheus.yml

level=info ts=2022-02-28T16:37:53.001Z caller=main.go:788 msg="Loading configuration file" filename=/config/isc_prometheus.yml

level=info ts=2022-02-28T16:37:53.007Z caller=main.go:816 msg="Completed loading of configuration file" filename=/config/isc_prometheus.yml
docker exec -ti -u root sam-prometheus-1 sh

/prometheus # wget http://172.20.10.3:50004/api/monitor/metrics -O iris-a-metrics

Connecting to 172.20.10.3:50004 (172.20.10.3:50004)

saving to 'iris-a-metrics'

iris-a-metrics       100% |*************************************************************************************************|  8584  0:00:00 ETA

'iris-a-metrics' saved

/prometheus # wget http://172.20.10.3:50014/api/monitor/metrics -O iris-b-metrics

Connecting to 172.20.10.3:50014 (172.20.10.3:50014)

saving to 'iris-b-metrics'

iris-b-metrics       100% |*************************************************************************************************|  7901  0:00:00 ETA

'iris-b-metrics' saved

/prometheus # wget http://172.20.10.3:52773/api/monitor/metrics -O iris-health-metrics

Connecting to 172.20.10.3:52773 (172.20.10.3:52773)

saving to 'iris-health-metrics'

iris-health-metrics  100% |*************************************************************************************************| 24567  0:00:00 ETA

'iris-health-metrics' saved

/prometheus # wget http://172.20.10.3:52774/api/monitor/metrics -O iris-metrics

Connecting to 172.20.10.3:52774 (172.20.10.3:52774)

saving to 'iris-metrics'

iris-metrics         100% |*************************************************************************************************|  6110  0:00:00 ETA

'iris-metrics' saved

I'm running the latest version of Docker Desktop: 4.5.0 (74594)

 
 docker version

After a reboot I'm now again able to reach 1 local instance (out of 2) and 0 containers + the IRIS-SAM instance.

 

 
docker-compose.yml
 
docker ps
 

 isc_prometheus.yml

Thanks Dmitry for your reply.

Actually, I already know all of this; that's why I don't understand why it's not working any more...

  1. gh repo clone intersystems-community/sam
  2. cd sam
  3. tar xvzf sam-1.0.0.115-unix.tar.gz
  4. cd sam-1.0.0.115-unix
  5. ./start.sh

Then I create a cluster and a target on my local instance (non-container):

iris list irishealth                

Configuration 'IRISHEALTH'

directory:    /Users/guilbaud/is/irishealth

versionid:    2021.2.0.649.0

datadir:      /Users/guilbaud/is/irishealth

conf file:    iris.cpf  (SuperServer port = 61773, WebServer = 52773)

status:       running, since Fri Feb 25 15:35:32 2022

state:        ok

product:      InterSystems IRISHealth

I check that the /api/monitor/metrics endpoint responds correctly:

curl http://127.0.0.1:52773/api/monitor/metrics -o metrics

  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current

                                 Dload  Upload   Total   Spent    Left  Speed

100 17634  100 17634    0     0  14174      0  0:00:01  0:00:01 --:--:-- 14383

 

Thanks Robert for your comment.
 

Merging globals is exactly what the toArchive method does here:


 


/// Archive counterpart of data.current.person. It inherits the live class's
/// properties (2nd superclass in the Extends list) but keeps its own data in
/// the ^off.person* globals, so "current" rows can be copied here object by
/// object (archive) or moved wholesale at the global level (toArchive).
Class data.archive.person Extends (%Persistent, data.current.person)
{

/// Prefix from which this class's storage globals are generated
/// (^off.personD / ^off.personI / ^off.personS in the Storage block below).
Parameter DEFAULTGLOBAL = "^off.person";

/// Object-by-object archiving: selects every ID of the source class
/// (data.current.person) with dynamic SQL, opens each object and saves a
/// property-by-property copy as an instance of this class.
/// purgeArchive - when non-zero, kill this class's data/index/stream globals first
/// purgeSource  - when non-zero, kill the source class's globals after copying
/// Writes a one-line summary to the console log and returns a %Status
/// (NOTE(review): sc only reflects the *last* %Save / %Prepare, not all of them).
ClassMethod archive(purgeArchive As %Integer = 0, purgeSource As %Integer = 0) As %Status
{
    set sc = $$$OK , tableName = ""
    set (archived,archivedErrors, severity) = 0

    // Super is a comma-delimited list ("%Persistent,data.current.person");
    // piece 2 is therefore the live class this archive extends.
    set sourceClassName = $PIECE(##class(%Dictionary.ClassDefinition).%OpenId(..%ClassName(1)).Super,",",2)
    set targetClassName = ..%ClassName(1)

    set sourceClass = ##class(%Dictionary.ClassDefinition).%OpenId(sourceClassName) 
    set targetClass = ##class(%Dictionary.ClassDefinition).%OpenId(targetClassName)

    // Global references (e.g. ^off.personD) taken from each class's first
    // storage definition; consumed via indirection (@loc) by the purge kills below.
    set sourceDataLocation = sourceClass.Storages.GetAt(1).DataLocation
    set sourceIndexLocation = sourceClass.Storages.GetAt(1).IndexLocation
    set sourceStreamLocation = sourceClass.Storages.GetAt(1).StreamLocation

    set targetDataLocation = targetClass.Storages.GetAt(1).DataLocation
    set targetIndexLocation = targetClass.Storages.GetAt(1).IndexLocation
    set targetStreamLocation = targetClass.Storages.GetAt(1).StreamLocation

    // Build "Schema.Table" for the source class from the compiled-class macros.
    set tableName = $$$CLASSsqlschemaname($$$gWRK,sourceClassName) _"."_  $$$CLASSsqltablename($$$gWRK,sourceClassName)

    // Proceed only if both class definitions were found and a table name resolved.
    if $ISOBJECT(sourceClass) 
     & $ISOBJECT(targetClass)
     & tableName '= "" {
        if $ISOBJECT(sourceClass.Storages.GetAt(1)) 
         & $ISOBJECT(targetClass.Storages.GetAt(1))
         {
            // Dynamic SQL built as a multi-line statement array: "SELECT id FROM <table>".
            set tStatement=##class(%SQL.Statement).%New(1) 
            kill sql
            set sql($i(sql)) = "SELECT" 
            set sql($i(sql)) = "id"  
            set sql($i(sql)) = "FROM"
            set sql($i(sql)) = tableName
            set sc = tStatement.%Prepare(.sql) 
            set result = tStatement.%Execute()

            // Optional purge of the archive side before copying (postconditional kill).
            kill:purgeArchive @targetDataLocation, @targetIndexLocation, @targetStreamLocation 

            while result.%Next() {
                set source = $CLASSMETHOD(sourceClassName,"%OpenId",result.%Get("id"))

                if $ISOBJECT(source) {
                    set archive = $CLASSMETHOD(targetClassName,"%New")

                    // Copy every property declared on the source class definition.
                    // NOTE(review): Properties holds only directly declared properties;
                    // inherited ones would be skipped — confirm against the class model.
                    for i = 1:1:sourceClass.Properties.Count() {
                        set propertyName = sourceClass.Properties.GetAt(i).Name
                        set $PROPERTY(archive,propertyName) = $PROPERTY(source,propertyName)
                    }

                    // %Status used as a boolean: $$$OK is 1, error statuses start
                    // with "0 " and evaluate false, so the count split works.
                    set sc = archive.%Save()
                    if sc {
                        set archived = archived + 1
                    } else {
                        set archivedErrors = archivedErrors + 1
                    }
                }
            }

            // Optional purge of the source side once all rows have been copied.
            kill:purgeSource @sourceDataLocation, @sourceIndexLocation, @sourceStreamLocation

            set msg ="archive data from " _ sourceClassName _ " into "_ targetClassName _ " result:" _ archived _ " archived (errors:" _ archivedErrors _ ")"

       } else {
            set severity = 1
            set msg = "ERROR WHILE ARCHIVING " _ sourceClassName _ " in "_ targetClassName _ " : " _ " classes have not storage definition"
        }
    } else {
        set severity = 1
        set msg = "ERROR WHILE ARCHIVING " _ sourceClassName _ " in "_ targetClassName _ " : " _ " classes not found in %Dictionary.ClassDefinition"
    }
    // Third argument is the severity flag; presumably 0=info, 1=warning — verify
    // against the %SYS.System.WriteToConsoleLog documentation.
    do ##class(%SYS.System).WriteToConsoleLog(msg,0,severity)
    Return sc
}

/// Bulk archiving: instead of copying object by object, MERGEs the source
/// class's entire data/index/stream globals into this class's globals.
/// purgeArchive - when non-zero, kill this class's globals before merging
/// purgeSource  - when non-zero, kill the source class's globals after merging
/// Records each run in ^mergeTrace and logs the outcome to the console log.
/// NOTE(review): sc is never set to an error — failures are only reported
/// through the console-log message, the return value stays $$$OK.
ClassMethod toArchive(purgeArchive As %Integer = 0, purgeSource As %Integer = 0) As %Status
{
    set sc=$$$OK

    // Same source/target discovery as archive(): 2nd superclass is the live class.
    set sourceClassName = $PIECE(##class(%Dictionary.ClassDefinition).%OpenId(..%ClassName(1)).Super,",",2)
    set targetClassName = ..%ClassName(1)
    set sourceClass = ##class(%Dictionary.ClassDefinition).%OpenId(sourceClassName) 
    set targetClass = ##class(%Dictionary.ClassDefinition).%OpenId(targetClassName)

    if $ISOBJECT(sourceClass) 
     & $ISOBJECT(targetClass) {
        if $ISOBJECT(sourceClass.Storages.GetAt(1)) 
         & $ISOBJECT(targetClass.Storages.GetAt(1))
         {
    
            // Global references from the first storage definition of each class,
            // used below with indirection (@loc) for kill and merge.
            set sourceDataLocation = sourceClass.Storages.GetAt(1).DataLocation
            set sourceIndexLocation = sourceClass.Storages.GetAt(1).IndexLocation
            set sourceStreamLocation = sourceClass.Storages.GetAt(1).StreamLocation

            set targetDataLocation = targetClass.Storages.GetAt(1).DataLocation
            set targetIndexLocation = targetClass.Storages.GetAt(1).IndexLocation
            set targetStreamLocation = targetClass.Storages.GetAt(1).StreamLocation

            // Optional purge of the archive globals before merging into them.
            kill:purgeArchive @targetDataLocation, @targetIndexLocation, @targetStreamLocation 

            // MERGE copies the whole source global subtree (including the ID
            // counter node) into the target global in one operation.
            merge @targetDataLocation = @sourceDataLocation
            merge @targetIndexLocation = @sourceIndexLocation
            merge @targetStreamLocation = @sourceStreamLocation

            // Audit trail: timestamped record of which data global was merged.
            set ^mergeTrace($i(^mergeTrace)) = $lb($zdt($h,3),sourceDataLocation)

            // Optional purge of the source globals once the merge is done.
            kill:purgeSource @sourceDataLocation, @sourceIndexLocation, @sourceStreamLocation

            set severity = 0
            set msg = "ARCHIVING " _ sourceClassName _ " in "_ targetClassName _ " SUCCESSFULLY"
                    

        } else {
            set severity = 1
            set msg = "ERROR WHILE ARCHIVING " _ sourceClassName _ " in "_ targetClassName _ " : " _ " classes have not storage definition"
        }
    } else {
        set severity = 1
        set msg = "ERROR WHILE ARCHIVING " _ sourceClassName _ " in "_ targetClassName _ " : " _ " classes not found in %Dictionary.ClassDefinition"
    }
    do ##class(%SYS.System).WriteToConsoleLog(msg,0,severity)
    return sc
}

/// Storage definition placing this class's data in the ^off.person* globals
/// (distinct from the source class's globals, which makes the merge/kill
/// operations above safe to target independently).
Storage Default
{
<data name="personDefaultData">
<value name="1">
<value>%%CLASSNAME</value>
</value>
<value name="2">
<value>name</value>
</value>
<value name="3">
<value>dob</value>
</value>
<value name="4">
<value>activ</value>
</value>
<value name="5">
<value>created</value>
</value>
</data>
<datalocation>^off.personD</datalocation>
<defaultdata>personDefaultData</defaultdata>
<idlocation>^off.personD</idlocation>
<indexlocation>^off.personI</indexlocation>
<streamlocation>^off.personS</streamlocation>
<type>%Storage.Persistent</type>
}

}